#Include all the modules we need for this project
import numpy as np
import scipy.stats as stats
import matplotlib.pyplot as plt
%matplotlib inline
import pandas as pd
import random as rnd
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn import svm
from scipy.stats import zscore
from sklearn import metrics
from sklearn.impute import SimpleImputer
import math
from sklearn.linear_model import LogisticRegression
from sklearn.utils import resample
from sklearn import svm
import xgboost as xgb
from sklearn.metrics import precision_score, recall_score, accuracy_score
from sklearn.naive_bayes import GaussianNB # using Gaussian algorithm from Naive Bayes
import pickle
from sklearn.cluster import KMeans
from scipy.spatial.distance import cdist
from sklearn.decomposition import PCA
from imblearn.over_sampling import SMOTE
from sklearn.model_selection import KFold, cross_val_score, LeaveOneOut, GridSearchCV, RandomizedSearchCV
from scipy.stats import randint as sp_randint
from sklearn.ensemble import RandomForestClassifier
from sklearn.pipeline import Pipeline, make_pipeline
from sklearn.tree import DecisionTreeClassifier
from scipy.stats import randint
# !pip install tensorflow
import tensorflow as tf
import keras.layers as layers
import keras.models as models
from tensorflow.keras import losses
from tensorflow.keras import optimizers, regularizers
from tensorflow.keras import Sequential, Model
from tensorflow.keras.applications import MobileNetV2, VGG19, VGG16
from tensorflow.keras.losses import binary_crossentropy
from tensorflow.keras.backend import log
from tensorflow import reduce_sum
from tensorflow.keras.backend import epsilon
from tensorflow.keras.layers import AveragePooling2D, Activation, Dense, GlobalAveragePooling2D, BatchNormalization, Dropout, Input, concatenate, UpSampling2D, Reshape, Conv2DTranspose, Concatenate
from tensorflow.keras.utils import to_categorical, image_dataset_from_directory
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau
from imblearn.over_sampling import SMOTE
import tensorflow_datasets as tfds
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
import cv2
import os
# !pip install keras-facenet
# from keras_facenet import FaceNet
from google.colab.patches import cv2_imshow
!pip install pydicom
import pydicom as dicom
from sklearn import preprocessing
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from tensorflow.keras.applications.vgg16 import VGG16
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.applications.inception_resnet_v2 import InceptionResNetV2
Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/
Collecting pydicom
Downloading pydicom-2.3.1-py3-none-any.whl (2.0 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 2.0/2.0 MB 60.8 MB/s eta 0:00:00
Installing collected packages: pydicom
Successfully installed pydicom-2.3.1
#Mount the Google drive
from google.colab import drive
drive.mount('/content/drive')
#when downloading Project data to Mac, the zip got copied as a folder. Nothing further needed to import data.
images_path = "/content/drive/MyDrive/Colab Notebooks/Capstone - Pneumonia/10. Capstone - Pnemonia"
#Setup the csv files into dataframes - need to read only stage_2_train_labels.csv and stage_2_details_class_info.csv
# stage_2_train_labels.csv: one row per bounding box (patientId, x, y, width, height, Target).
df1A = pd.read_csv(images_path + "/stage_2_train_labels.csv")
display(df1A.head(10))
print("Train_labels shape :", df1A.shape)
print("Train labels: Unique patientIds we have: ", df1A['patientId'].nunique())
# Count rows that are fully identical across every column (true duplicates only).
tmp = df1A[df1A.duplicated(['patientId', 'x', 'y', 'width', 'height', 'Target'])]
print("Train labels: pure duplicates (all rows same) : ", tmp.shape[0])
#there are multiple entries for the same patientId with different image coordinates. these are diff. images possibly, so keep them
#eg. df1A[df1A['patientId'] == '00704310-78a8-4b38-8475-49f4573b2dbb'] there are two different entries.
# stage_2_detailed_class_info.csv: one row per box with the 3-way class label.
df1B = pd.read_csv(images_path + "/stage_2_detailed_class_info.csv")
display(df1B.head(10))
print("Detailed_class_info shape ", df1B.shape)
print("Detailed_class_info: Unique patientIds we have: ", df1B['patientId'].nunique())
tmp = df1B[df1B.duplicated(['patientId', 'class'])]
print("Detailed_class_info: pure duplicates (all rows same) : ", tmp.shape[0])
#here though there seem to be duplicates, but they are not. When matched to df1A, they will be separate entries
#eg. df1B[df1B['patientId'] == '00704310-78a8-4b38-8475-49f4573b2dbb']
#do below to merge - but this is punching holes in the index sequence
# df1 = df1A.merge(df1B, on='patientId')
# df1 = df1.drop_duplicates()
# display(df1.head(10))
# print("merged DF shape ", df1.shape)
# NOTE(review): this row-aligned equality check assumes both CSVs list rows in the
# same order; if they ever did not, the column assignment below would silently
# misalign classes - there is no else branch guarding that case.
if (df1A['patientId'] == df1B['patientId']).sum() == df1A.shape[0]:
    print("patientId are same in both dataframes.")
#Include the class column into the first dataframe
df1A['class'] = df1B['class']
print("Merged dataframe")
display(df1A.head(10))
df1A.shape
Mounted at /content/drive
| patientId | x | y | width | height | Target | |
|---|---|---|---|---|---|---|
| 0 | 0004cfab-14fd-4e49-80ba-63a80b6bddd6 | NaN | NaN | NaN | NaN | 0 |
| 1 | 00313ee0-9eaa-42f4-b0ab-c148ed3241cd | NaN | NaN | NaN | NaN | 0 |
| 2 | 00322d4d-1c29-4943-afc9-b6754be640eb | NaN | NaN | NaN | NaN | 0 |
| 3 | 003d8fa0-6bf1-40ed-b54c-ac657f8495c5 | NaN | NaN | NaN | NaN | 0 |
| 4 | 00436515-870c-4b36-a041-de91049b9ab4 | 264.0 | 152.0 | 213.0 | 379.0 | 1 |
| 5 | 00436515-870c-4b36-a041-de91049b9ab4 | 562.0 | 152.0 | 256.0 | 453.0 | 1 |
| 6 | 00569f44-917d-4c86-a842-81832af98c30 | NaN | NaN | NaN | NaN | 0 |
| 7 | 006cec2e-6ce2-4549-bffa-eadfcd1e9970 | NaN | NaN | NaN | NaN | 0 |
| 8 | 00704310-78a8-4b38-8475-49f4573b2dbb | 323.0 | 577.0 | 160.0 | 104.0 | 1 |
| 9 | 00704310-78a8-4b38-8475-49f4573b2dbb | 695.0 | 575.0 | 162.0 | 137.0 | 1 |
Train_labels shape : (30227, 6) Train labels: Unique patientIds we have: 26684 Train labels: pure duplicates (all rows same) : 0
| patientId | class | |
|---|---|---|
| 0 | 0004cfab-14fd-4e49-80ba-63a80b6bddd6 | No Lung Opacity / Not Normal |
| 1 | 00313ee0-9eaa-42f4-b0ab-c148ed3241cd | No Lung Opacity / Not Normal |
| 2 | 00322d4d-1c29-4943-afc9-b6754be640eb | No Lung Opacity / Not Normal |
| 3 | 003d8fa0-6bf1-40ed-b54c-ac657f8495c5 | Normal |
| 4 | 00436515-870c-4b36-a041-de91049b9ab4 | Lung Opacity |
| 5 | 00436515-870c-4b36-a041-de91049b9ab4 | Lung Opacity |
| 6 | 00569f44-917d-4c86-a842-81832af98c30 | No Lung Opacity / Not Normal |
| 7 | 006cec2e-6ce2-4549-bffa-eadfcd1e9970 | No Lung Opacity / Not Normal |
| 8 | 00704310-78a8-4b38-8475-49f4573b2dbb | Lung Opacity |
| 9 | 00704310-78a8-4b38-8475-49f4573b2dbb | Lung Opacity |
Detailed_class_info shape (30227, 2) Detailed_class_info: Unique patientIds we have: 26684 Detailed_class_info: pure duplicates (all rows same) : 3543 patientId are same in both dataframes. Merged dataframe
| patientId | x | y | width | height | Target | class | |
|---|---|---|---|---|---|---|---|
| 0 | 0004cfab-14fd-4e49-80ba-63a80b6bddd6 | NaN | NaN | NaN | NaN | 0 | No Lung Opacity / Not Normal |
| 1 | 00313ee0-9eaa-42f4-b0ab-c148ed3241cd | NaN | NaN | NaN | NaN | 0 | No Lung Opacity / Not Normal |
| 2 | 00322d4d-1c29-4943-afc9-b6754be640eb | NaN | NaN | NaN | NaN | 0 | No Lung Opacity / Not Normal |
| 3 | 003d8fa0-6bf1-40ed-b54c-ac657f8495c5 | NaN | NaN | NaN | NaN | 0 | Normal |
| 4 | 00436515-870c-4b36-a041-de91049b9ab4 | 264.0 | 152.0 | 213.0 | 379.0 | 1 | Lung Opacity |
| 5 | 00436515-870c-4b36-a041-de91049b9ab4 | 562.0 | 152.0 | 256.0 | 453.0 | 1 | Lung Opacity |
| 6 | 00569f44-917d-4c86-a842-81832af98c30 | NaN | NaN | NaN | NaN | 0 | No Lung Opacity / Not Normal |
| 7 | 006cec2e-6ce2-4549-bffa-eadfcd1e9970 | NaN | NaN | NaN | NaN | 0 | No Lung Opacity / Not Normal |
| 8 | 00704310-78a8-4b38-8475-49f4573b2dbb | 323.0 | 577.0 | 160.0 | 104.0 | 1 | Lung Opacity |
| 9 | 00704310-78a8-4b38-8475-49f4573b2dbb | 695.0 | 575.0 | 162.0 | 137.0 | 1 | Lung Opacity |
(30227, 7)
#Training data mapping
#Important data from mentor session - add Age, Sex into the input feature set
# def pullSexAge(rowData):
#     dcm_file = images_path+'/stage_2_train_images/'+'{}.dcm'.format(rowData.patientId)
#     dcm_data = dicom.dcmread(dcm_file)
#     return dcm_data.PatientSex,dcm_data.PatientAge
# df1A['sex'], df1A['age'] = zip(*df1A.apply(pullSexAge, axis=1)) #we are not using this really, but good for visualization
# display(df1A.head(5))
#below code executed once and pickle'd on drive to avoid high execution time parsing all the image files
# X = []
# Y = []
# fn = []
# cnt = 0
# dir_name = images_path + "/stage_2_train_images"
# for file_train in os.listdir(dir_name):
#     cnt = cnt + 1
#     # print("File count ", cnt)
#     f = os.path.join(dir_name, file_train)
#     if os.path.isfile(f):
#         f_dash = os.path.splitext(file_train)[0]
#         if(df1A[df1A['patientId'] == f_dash].shape[0] != 0):
#             dicom_file = dicom.dcmread(f)
#             #put the image into the list X
#             X.append(cv2.resize(dicom_file.pixel_array, (256,256)))
#             #put the target variable (the 'class' string, column 6) in the list Y
#             Y.append(df1A[df1A['patientId'] == f_dash].iloc[0][6])
#             fn.append(file_train)
#         else:
#             print("Ignoring file ", f_dash)
#     # if cnt == 10:
#     #     break;
# X = np.array(X)
# Y = np.array(Y)
# # review the shape of the input and target arrays we have put together
# display(X.shape)
# display(Y.shape)
# with open(images_path + 'outfile_X_Train', 'wb') as fp:
#     pickle.dump(X, fp)
# with open(images_path + 'outfile_Y_Train', 'wb') as fp:
#     pickle.dump(Y, fp)
# with open(images_path + 'outfile_fn_Train', 'wb') as fp:
#     pickle.dump(fn, fp)
# Reload the pre-parsed training arrays pickled by the commented-out block above.
# NOTE(review): images_path has no trailing '/', so the pickle names concatenate
# onto the folder name; this matches the (commented-out) save paths, but confirm.
with open (images_path + 'outfile_X_Train', 'rb') as fp:
    X = pickle.load(fp)
with open (images_path + 'outfile_Y_Train', 'rb') as fp:
    Y = pickle.load(fp)
with open (images_path + 'outfile_fn_Train', 'rb') as fp:
    fn = pickle.load(fp)
# Add a channel axis: (N, 256, 256) -> (N, 256, 256, 1).
X = np.expand_dims(X, axis=-1) #need to do this for CNN input layer
#target variable needs label encoding
le = preprocessing.LabelEncoder()
# Y holds class strings; classes_ sorts alphabetically, so the encoding is
# 0='Lung Opacity', 1='No Lung Opacity / Not Normal', 2='Normal' (see output below).
Y_le = le.fit_transform(Y) #the 'Normal' series values map to the index appended in y_pred_final
display(list(le.classes_))
Y_dash = pd.DataFrame({'target': Y_le}, columns=['target'])
# Y_dash.loc[Y_dash['target'] == 'No Lung Opacity / Not Normal'] = 'Normal' #do this to get to binary classification
# One-hot encode the 3-way label (columns 0/1/2 per the encoding above).
Y_dash = pd.get_dummies(Y_dash['target'])
print("Train img shape ", X.shape)
print("Train target shape ", Y_dash.shape)
display(Y_dash.head())
display(Y_dash.value_counts())
#Train images are now mapped to the classes - X and Y_dash.
['Lung Opacity', 'No Lung Opacity / Not Normal', 'Normal']
Train img shape (26684, 256, 256, 1) Train target shape (26684, 3)
| 0 | 1 | 2 | |
|---|---|---|---|
| 0 | 0 | 0 | 1 |
| 1 | 0 | 1 | 0 |
| 2 | 0 | 0 | 1 |
| 3 | 0 | 0 | 1 |
| 4 | 0 | 0 | 1 |
0 1 2 0 1 0 11821 0 1 8851 1 0 0 6012 dtype: int64
#Test images - no labels provided. Need to assume they are for placing bounding box (milestone 2)
df2 = pd.read_csv(images_path + "/stage_2_sample_submission.csv")
display(df2.head(10))
display(df2.shape)
#below code executed only once and pickle'd, to avoid high execution times
# X_t = []
# Y_t = []
# fn_t = []
# cnt = 0
# unknown = 0
# dir_name = images_path + "/stage_2_test_images"
# for file_train in os.listdir(dir_name):
#     # print("File count ", cnt)
#     f = os.path.join(dir_name, file_train)
#     if os.path.isfile(f):
#         cnt = cnt + 1
#         f_dash = os.path.splitext(file_train)[0]
#         dicom_file = dicom.dcmread(f)
#         #put the image into the list X
#         X_t.append(cv2.resize(dicom_file.pixel_array, (256,256)))
#         fn_t.append(file_train)
#         if(df2[df2['patientId'] == f_dash].shape[0] != 0):
#             #put the target variable in the list Y as Pneumonia
#             Y_t.append('Lung Opacity')
#         else:
#             #we dont know anything about this one.
#             Y_t.append('Unknown')
#             print("Count ", cnt, ": Not in csv list: ", f_dash, ". Putting as 'unknown'")
#             unknown = unknown+1
#     # if cnt == 10:
#     #     break;
# print('Unknown images (not in sample_submission csv) : ', unknown)
# X_t = np.array(X_t)
# Y_t = np.array(Y_t)
# with open(images_path + 'outfile_X_Test', 'wb') as fp:
#     pickle.dump(X_t, fp)
# with open(images_path + 'outfile_Y_Test', 'wb') as fp:
#     pickle.dump(Y_t, fp)
# with open(images_path + 'outfile_fn_Test', 'wb') as fp:
#     pickle.dump(fn_t, fp)
# Reload the pre-parsed test arrays pickled by the commented-out block above.
with open (images_path + 'outfile_X_Test', 'rb') as fp:
    X_t = pickle.load(fp)
with open (images_path + 'outfile_Y_Test', 'rb') as fp:
    Y_t = pickle.load(fp)
with open (images_path + 'outfile_fn_Test', 'rb') as fp:
    fn_t = pickle.load(fp)
# Add a channel axis: (N, 256, 256) -> (N, 256, 256, 1) for CNN input.
X_t = np.expand_dims(X_t, axis=-1)
Y_dash_t = pd.DataFrame({'target': Y_t}, columns=['target'])
Y_dash_t = pd.get_dummies(Y_dash_t['target']) #this is not correct, should be ignored
fn_dash_t = pd.DataFrame({'filename': fn_t}, columns=['filename'])
print("Test img shape ", X_t.shape)
print("Test target shape ", Y_dash_t.shape)
display(fn_dash_t.head())
#to be double-sure, just check if test images are duplicated/listed in training folder image list.
ext_cnt = 0
for x in fn_dash_t['filename']:
    if x in fn: #checking if the test images are in the training set image list
        ext_cnt = ext_cnt + 1
        print (x, " Exists")
print("No. of test image filenames that are available in training data folder ", ext_cnt)
#Test data - Images mapped into X_t, but labels unavailable for classification.
#we will need to take test images from the training set itself.
| patientId | PredictionString | |
|---|---|---|
| 0 | 0000a175-0e68-4ca4-b1af-167204a7e0bc | 0.5 0 0 100 100 |
| 1 | 0005d3cc-3c3f-40b9-93c3-46231c3eb813 | 0.5 0 0 100 100 |
| 2 | 000686d7-f4fc-448d-97a0-44fa9c5d3aa6 | 0.5 0 0 100 100 |
| 3 | 000e3a7d-c0ca-4349-bb26-5af2d8993c3d | 0.5 0 0 100 100 |
| 4 | 00100a24-854d-423d-a092-edcf6179e061 | 0.5 0 0 100 100 |
| 5 | 0015597f-2d69-4bc7-b642-5b5e01534676 | 0.5 0 0 100 100 |
| 6 | 001b0c51-c7b3-45c1-9c17-fa7594cab96e | 0.5 0 0 100 100 |
| 7 | 0022bb50-bf6c-4185-843e-403a9cc1ea80 | 0.5 0 0 100 100 |
| 8 | 00271e8e-aea8-4f0a-8a34-3025831f1079 | 0.5 0 0 100 100 |
| 9 | 0028450f-5b8e-4695-9416-8340b6f686b0 | 0.5 0 0 100 100 |
(3000, 2)
Test img shape (3026, 256, 256, 1) Test target shape (3026, 2)
| filename | |
|---|---|
| 0 | 257f3de7-2f8f-4dba-a0e7-37fc3e6cc018.dcm |
| 1 | 252437ed-a4e2-4743-995c-65a4bb133996.dcm |
| 2 | 268116a6-2304-4316-b5b3-3073fc5467b1.dcm |
| 3 | 264607e7-a410-4519-b0df-6e5db64c373e.dcm |
| 4 | 2664366f-4f04-49e1-ab20-19b9173f23bc.dcm |
No. of test image filenames that are available in training data folder 0
#Test annotations are unclear - all images have same bounding box.. and all have pnemonia?
#train data, already has the annotations mapped
#split the data to test and training sets
X_train, X_test, Y_train, Y_test = train_test_split(X, Y_dash, test_size=0.2, random_state=42)
X_train_rgb = np.repeat(X_train[..., np.newaxis], 3, -1)
X_train_rgb = np.squeeze(X_train_rgb)
X_test_rgb = np.repeat(X_test[..., np.newaxis], 3, -1)
X_test_rgb = np.squeeze(X_test_rgb)
print(X_train_rgb.shape)
print(X_test_rgb.shape)
(21347, 256, 256, 3) (5337, 256, 256, 3)
#preprocessing
# df1A['age'] = df1A.age.astype(int)
#image size reduced to 256x256 already during Step 1
display(df1A.head())
display(df1B.head())
print('Total No of Patients in Class Info', df1B['patientId'].value_counts().shape[0])
print('Total distinct classes: ', df1B['class'].unique())
##Identify duplicates records in the data
# Patients with more than one bounding box show up as duplicated patientIds here.
dupli = df1B['patientId'].duplicated()
print("Duplicate entries (with more than 1 BB) ", sum(dupli))
# NOTE(review): the positional args (5, 10) are forwarded to plot.bar - their
# intended effect here is unclear; confirm they are deliberate.
df1B.groupby('class').size().plot.bar(5, 10, color=['Orange', 'green', 'Indigo'])
# Box coordinates are NaN on Target == 0 rows; zero-fill so numeric summaries work.
df1A['x'] = df1A['x'].replace(np.nan, 0)
df1A['y'] = df1A['y'].replace(np.nan, 0)
df1A['width'] = df1A['width'].replace(np.nan, 0)
df1A['height'] = df1A['height'].replace(np.nan, 0)
print("\nUpdated data samples:")
display(df1A.head())
print(" \nCount total NaN at each column in the dataset : \n\n",
      df1A.isnull().sum())
print("Dataframe Info: \n")
display(df1A.info())
print("Dataframe describe (numeric data): \n")
display(df1A.describe())
print('Lets check the distribution of `Target` and `class` column'); print('--'*40)
# Two pie charts: binary Target distribution (left) and 3-way class distribution (right).
fig = plt.figure(figsize = (10, 6))
ax = fig.add_subplot(121)
g = (df1A['Target'].value_counts()
     .plot(kind = 'pie', autopct = '%.0f%%',
           labels = ['Negative', 'Pneumonia Evidence'],
           colors = ['green', 'red'],
           startangle = 90,
           title = 'Distribution of Target', fontsize = 12)
     .set_ylabel(''))
ax = fig.add_subplot(122)
g = (df1B['class'].value_counts().sort_index(ascending = False)
     .plot(kind = 'pie', autopct = '%.0f%%',
           colors = ['green', 'orange', 'red'],
           startangle = 90, title = 'Distribution of Class',
           fontsize = 12)
     .set_ylabel(''))
plt.tight_layout()
| patientId | x | y | width | height | Target | class | |
|---|---|---|---|---|---|---|---|
| 0 | 0004cfab-14fd-4e49-80ba-63a80b6bddd6 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal |
| 1 | 00313ee0-9eaa-42f4-b0ab-c148ed3241cd | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal |
| 2 | 00322d4d-1c29-4943-afc9-b6754be640eb | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal |
| 3 | 003d8fa0-6bf1-40ed-b54c-ac657f8495c5 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | Normal |
| 4 | 00436515-870c-4b36-a041-de91049b9ab4 | 264.0 | 152.0 | 213.0 | 379.0 | 1 | Lung Opacity |
| patientId | class | |
|---|---|---|
| 0 | 0004cfab-14fd-4e49-80ba-63a80b6bddd6 | No Lung Opacity / Not Normal |
| 1 | 00313ee0-9eaa-42f4-b0ab-c148ed3241cd | No Lung Opacity / Not Normal |
| 2 | 00322d4d-1c29-4943-afc9-b6754be640eb | No Lung Opacity / Not Normal |
| 3 | 003d8fa0-6bf1-40ed-b54c-ac657f8495c5 | Normal |
| 4 | 00436515-870c-4b36-a041-de91049b9ab4 | Lung Opacity |
Total No of Patients in Class Info 26684 Total distinct classes: ['No Lung Opacity / Not Normal' 'Normal' 'Lung Opacity'] Duplicate entries (with more than 1 BB) 3543 Updated data samples:
| patientId | x | y | width | height | Target | class | |
|---|---|---|---|---|---|---|---|
| 0 | 0004cfab-14fd-4e49-80ba-63a80b6bddd6 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal |
| 1 | 00313ee0-9eaa-42f4-b0ab-c148ed3241cd | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal |
| 2 | 00322d4d-1c29-4943-afc9-b6754be640eb | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal |
| 3 | 003d8fa0-6bf1-40ed-b54c-ac657f8495c5 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | Normal |
| 4 | 00436515-870c-4b36-a041-de91049b9ab4 | 264.0 | 152.0 | 213.0 | 379.0 | 1 | Lung Opacity |
Count total NaN at each column in the dataset : patientId 0 x 0 y 0 width 0 height 0 Target 0 class 0 dtype: int64 Dataframe Info: <class 'pandas.core.frame.DataFrame'> RangeIndex: 30227 entries, 0 to 30226 Data columns (total 7 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 patientId 30227 non-null object 1 x 30227 non-null float64 2 y 30227 non-null float64 3 width 30227 non-null float64 4 height 30227 non-null float64 5 Target 30227 non-null int64 6 class 30227 non-null object dtypes: float64(4), int64(1), object(2) memory usage: 1.6+ MB
None
Dataframe describe (numeric data):
| x | y | width | height | Target | |
|---|---|---|---|---|---|
| count | 30227.000000 | 30227.000000 | 30227.000000 | 30227.000000 | 30227.000000 |
| mean | 124.561683 | 115.960962 | 69.060575 | 104.084825 | 0.316108 |
| std | 216.326397 | 190.012883 | 106.910496 | 176.932152 | 0.464963 |
| min | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 |
| 25% | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 |
| 50% | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 |
| 75% | 193.000000 | 231.000000 | 169.000000 | 188.000000 | 1.000000 |
| max | 835.000000 | 881.000000 | 528.000000 | 942.000000 | 1.000000 |
Lets check the distribution of `Target` and `class` column --------------------------------------------------------------------------------
# How many patients have 1, 2, 3... bounding boxes?
box_patient_df = df1A.groupby('patientId').size().reset_index(name='boxes')
box_patient_df.groupby('boxes').size().reset_index(name='patients')
print('Let\'s also check whether each patientId has only one type of class'); print('--'*40)
print('Yes, each patientId is associated with only {} class'.format(df1B.groupby(['patientId'])['class'].nunique().max()))
# Merge the two dataframes
# NOTE(review): df1A already received a 'class' column earlier in the notebook, so
# this concat yields a second 'class' column (8 columns total, per the output) -
# confirm the duplicate column is intended.
train_class_df = pd.concat([df1A, df1B['class']], axis = 1)
print('Shape of the dataset after the merge: {}'.format(train_class_df.shape))
def checkXray(i, dirName):
    """Print bounding-box info and display the DICOM X-ray for row `i`.

    Parameters
    ----------
    i : int
        Row index into the module-level `train_class_df` dataframe.
    dirName : str
        Directory holding the .dcm files, named <patientId>.dcm.
    """
    patientId = train_class_df['patientId'][i]
    print("Patient Id: ", patientId)
    fileName = dirName + "/" + patientId
    print("\nBounding Box Coordinates, X: ", train_class_df['x'][i])
    print("\nBounding Box Coordinates, Y: ", train_class_df['y'][i])
    print("\nBounding Box Coordinates, Width: ", train_class_df['width'][i])
    print("\nBounding Box Coordinates, Height: ", train_class_df['height'][i])
    patient_file = '%s.dcm' % fileName
    # dcmread replaces the deprecated dicom.read_file alias (removed in pydicom 3.x)
    # and matches the dcmread calls used elsewhere in this notebook.
    patient_data = dicom.dcmread(patient_file)
    print(patient_data)
    plt.imshow(patient_data.pixel_array, cmap=plt.cm.gist_gray)
# Directory holding the training .dcm files.
TRAIN_IMG_DCM=images_path + '/stage_2_train_images'
# checkXray(3, TRAIN_IMG_DCM)
checkXray(4, TRAIN_IMG_DCM)  # row 4 has Target = 1, so box coordinates are populated
#visualizations
Let's also check whether each patientId has only one type of class -------------------------------------------------------------------------------- Yes, each patientId is associated with only 1 class Shape of the dataset after the merge: (30227, 8) Patient Id: 00436515-870c-4b36-a041-de91049b9ab4 Bounding Box Coordinates, X: 264.0 Bounding Box Coordinates, Y: 152.0 Bounding Box Coordinates, Width: 213.0 Bounding Box Coordinates, Height: 379.0 Dataset.file_meta ------------------------------- (0002, 0000) File Meta Information Group Length UL: 200 (0002, 0001) File Meta Information Version OB: b'\x00\x01' (0002, 0002) Media Storage SOP Class UID UI: Secondary Capture Image Storage (0002, 0003) Media Storage SOP Instance UID UI: 1.2.276.0.7230010.3.1.4.8323329.6379.1517874325.469569 (0002, 0010) Transfer Syntax UID UI: JPEG Baseline (Process 1) (0002, 0012) Implementation Class UID UI: 1.2.276.0.7230010.3.0.3.6.0 (0002, 0013) Implementation Version Name SH: 'OFFIS_DCMTK_360' ------------------------------------------------- (0008, 0005) Specific Character Set CS: 'ISO_IR 100' (0008, 0016) SOP Class UID UI: Secondary Capture Image Storage (0008, 0018) SOP Instance UID UI: 1.2.276.0.7230010.3.1.4.8323329.6379.1517874325.469569 (0008, 0020) Study Date DA: '19010101' (0008, 0030) Study Time TM: '000000.00' (0008, 0050) Accession Number SH: '' (0008, 0060) Modality CS: 'CR' (0008, 0064) Conversion Type CS: 'WSD' (0008, 0090) Referring Physician's Name PN: '' (0008, 103e) Series Description LO: 'view: AP' (0010, 0010) Patient's Name PN: '00436515-870c-4b36-a041-de91049b9ab4' (0010, 0020) Patient ID LO: '00436515-870c-4b36-a041-de91049b9ab4' (0010, 0030) Patient's Birth Date DA: '' (0010, 0040) Patient's Sex CS: 'F' (0010, 1010) Patient's Age AS: '32' (0018, 0015) Body Part Examined CS: 'CHEST' (0018, 5101) View Position CS: 'AP' (0020, 000d) Study Instance UID UI: 1.2.276.0.7230010.3.1.2.8323329.6379.1517874325.469568 (0020, 000e) Series Instance UID UI: 
1.2.276.0.7230010.3.1.3.8323329.6379.1517874325.469567 (0020, 0010) Study ID SH: '' (0020, 0011) Series Number IS: '1' (0020, 0013) Instance Number IS: '1' (0020, 0020) Patient Orientation CS: '' (0028, 0002) Samples per Pixel US: 1 (0028, 0004) Photometric Interpretation CS: 'MONOCHROME2' (0028, 0010) Rows US: 1024 (0028, 0011) Columns US: 1024 (0028, 0030) Pixel Spacing DS: [0.139, 0.139] (0028, 0100) Bits Allocated US: 8 (0028, 0101) Bits Stored US: 8 (0028, 0102) High Bit US: 7 (0028, 0103) Pixel Representation US: 0 (0028, 2110) Lossy Image Compression CS: '01' (0028, 2114) Lossy Image Compression Method CS: 'ISO_10918_1' (7fe0, 0010) Pixel Data OB: Array of 119382 elements
# Helper function to get additional features from dicom images
from glob import glob
from tqdm.notebook import tqdm_notebook
def get_tags(data, path):
    """Enrich `data` in place with DICOM header tags for every image under `path`.

    For each .dcm file, copies PatientSex / PatientAge / BodyPartExamined /
    ViewPosition / Modality onto all rows of `data` whose 'patientId' matches
    the file's PatientID tag.

    Parameters
    ----------
    data : pd.DataFrame
        Must have a 'patientId' column; tag columns are created/filled in place.
    path : str
        Directory containing the .dcm files.
    """
    images = os.listdir(path)
    for i, name in tqdm_notebook(enumerate(images)):
        img_path = os.path.join(path, name)
        # stop_before_pixels skips decoding the (large) pixel data - only the
        # header tags are needed here, so this is far faster across 26k+ files.
        # (dcmread also replaces the deprecated read_file alias.)
        img_data = dicom.dcmread(img_path, stop_before_pixels=True)
        idx = (data['patientId'] == img_data.PatientID)
        data.loc[idx, 'PatientSex'] = img_data.PatientSex
        data.loc[idx, 'PatientAge'] = pd.to_numeric(img_data.PatientAge)
        data.loc[idx, 'BodyPartExamined'] = img_data.BodyPartExamined
        data.loc[idx, 'ViewPosition'] = img_data.ViewPosition
        data.loc[idx, 'Modality'] = img_data.Modality
# get_tags(train_class_df, TRAIN_IMG_DCM)
# train_class_df.to_pickle(images_path + '/train_feature_engineered.pkl')
# Reload the tag-enriched dataframe pickled on a previous run.
# NOTE(review): this reads from the parent folder of images_path ('/../'), unlike
# the commented-out save path above - confirm the file location.
train_class_df = pd.read_pickle(images_path + '/../train_feature_engineered.pkl')
print('Read the training images file names and path'); print('--'*40)
# Build a patientId -> file path lookup from the .dcm files on disk.
images = pd.DataFrame({'path': glob(os.path.join(TRAIN_IMG_DCM, '*.dcm'))})
images['patientId'] = images['path'].map(lambda x: os.path.splitext(os.path.basename(x))[0])
print('Number of images in the training folder: {}'.format(images.shape[0]))
print('Columns in the training images dataframe: {}'.format(list(images.columns)))
# assert images.shape[0] == len(list(set(train_class_df['patientId']))), 'Number of training images should be equal to the unique patientIds we have'
print('Merge path from the `images` dataframe with `train_class` dataframe'); print('--'*40)
# Left-merge keeps every label row; rows without a matching .dcm get a NaN path.
train_class_df = train_class_df.merge(images, on = 'patientId', how = 'left')
print('Shape of the `train_class` dataframe after merge: {}'.format(train_class_df.shape))
train_class_df.head()
display(train_class_df.shape)
print('As expected unique in `BodyPartExamined` is: {}'.format(train_class_df['BodyPartExamined'].unique()[0]))
print('Unique in `Modality` is: {}'.format(train_class_df['Modality'].unique()[0])); print('--'*40)
Read the training images file names and path -------------------------------------------------------------------------------- Number of images in the training folder: 26811 Columns in the training images dataframe: ['path', 'patientId'] Merge path from the `images` dataframe with `train_class` dataframe -------------------------------------------------------------------------------- Shape of the `train_class` dataframe after merge: (30227, 15)
(30227, 15)
As expected unique in `BodyPartExamined` is: CHEST Unique in `Modality` is: CR --------------------------------------------------------------------------------
from matplotlib.patches import Rectangle
print('Overall the distribution is almost equal for `ViewPosition` but where there\'s a Pneumonia Evidence, `ViewPosition` is `AP`')
print('AP: Anterior/Posterior, PA: Posterior/Anterior'); print('--'*40)
# Pie charts: ViewPosition distribution overall (left) vs. rows with Target == 1 (right).
fig = plt.figure(figsize = (10, 6))
ax = fig.add_subplot(121)
g = (train_class_df['ViewPosition'].value_counts()
     .plot(kind = 'pie', autopct = '%.0f%%',
           startangle = 90,
           title = 'Distribution of ViewPosition, Overall',
           fontsize = 12)
     .set_ylabel(''))
ax = fig.add_subplot(122)
g = (train_class_df.loc[train_class_df['Target'] == 1, 'ViewPosition']
     .value_counts().sort_index(ascending = False)
     .plot(kind = 'pie', autopct = '%.0f%%',
           startangle = 90, counterclock = False,
           title = 'Distribution of ViewPosition, Pneumonia Evidence',
           fontsize = 12)
     .set_ylabel(''))
print('Plot x and y centers of bounding boxes'); print('--'*40)
# Creating a dataframe with columns for center of the rectangles.
# FIX: .copy() makes bboxes an independent dataframe, so assigning the derived
# center columns no longer raises pandas' SettingWithCopyWarning (which appeared
# in the original run output).
bboxes = train_class_df[train_class_df['Target'] == 1].copy()
bboxes['xw'] = bboxes['x'] + bboxes['width'] / 2
bboxes['yh'] = bboxes['y'] + bboxes['height'] / 2
g = sns.jointplot(x = bboxes['xw'], y = bboxes['yh'], data = bboxes,
                  kind = 'hex', alpha = 0.5,)
plt.suptitle('Bounding Boxes Location, Pneumonia Evidence')
plt.tight_layout()
plt.subplots_adjust(top = 0.95)
plt.show()
# Helper function to plot bboxes scatter
# Reference for this function & plots: https://www.kaggle.com/gpreda/rsna-pneumonia-detection-eda
def bboxes_scatter(df1, df2, text1, text2):
    """Plot lung-opacity box centers (and faint box rectangles) for two cohorts side by side.

    Parameters
    ----------
    df1, df2 : pd.DataFrame
        Must contain 'x', 'y', 'width', 'height' and the derived center
        columns 'xw', 'yh'.
    text1, text2 : str
        Subplot titles describing each cohort.
    """
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize = (13, 8))
    fig.subplots_adjust(top = 0.85)
    fig.suptitle('Plotting centers of lung opacity\n{} & {}'.format(text1, text2))
    df1.plot.scatter(x = 'xw', y = 'yh', ax = ax1, alpha = 0.8, marker = '.',
                     xlim = (0, 1024), ylim = (0, 1024), color = 'green')
    ax1.set_title('Centers of Lung Opacity\n{}'.format(text1))
    # Very low alpha per rectangle so dense regions build up visibly darker.
    for i, row in df1.iterrows():
        ax1.add_patch(Rectangle(xy = (row['x'], row['y']),
                                width = row['width'], height = row['height'],
                                alpha = 3.5e-3, color = 'yellow'))
    # FIX: the original called plt.title(text2) here, which targeted whatever axes
    # was "current" (not ax1) and was then overwritten by ax2.set_title below -
    # removed as mis-targeted dead code.
    df2.plot.scatter(x = 'xw', y = 'yh', ax = ax2, alpha = 0.8, marker = '.',
                     color = 'brown', xlim = (0, 1024), ylim = (0, 1024))
    ax2.set_title('Centers of Lung Opacity\n{}'.format(text2))
    for i, row in df2.iterrows():
        ax2.add_patch(Rectangle(xy = (row['x'], row['y']),
                                width = row['width'], height = row['height'],
                                alpha = 3.5e-3,
                                color = 'yellow'))
    plt.show()
print('Exploring the bounding boxes centers for `ViewPositions` for random sample = 1000')
# Compare box centers for PA vs. AP views (1000 random boxes each).
df1 = bboxes[bboxes['ViewPosition'] == 'PA'].sample(1000)
df2 = bboxes[bboxes['ViewPosition'] == 'AP'].sample(1000)
bboxes_scatter(df1, df2, 'View Position = PA', 'View Position = AP')
print('Checking outliers in `PatientAge'); print('--'*40)
print('Minimum `PatientAge` in the training dataset: {}'.format(train_class_df['PatientAge'].min()))
print('Maximum `PatientAge` in the training dataset: {}'.format(train_class_df['PatientAge'].max()))
print('75th Percentile of `PatientAge` in the training dataset: {}'.format(train_class_df['PatientAge'].quantile(0.75)))
# NOTE(review): the "upper whisker" below computes Q3 + 1.0*IQR, not the
# conventional Q3 + 1.5*IQR - confirm the multiplier is intentional.
print('`PatientAge` in upper whisker for box plot: {}'.format(train_class_df['PatientAge'].quantile(0.75) + (train_class_df['PatientAge'].quantile(0.75) - train_class_df['PatientAge'].quantile(0.25))))
print()
fig = plt.figure(figsize = (10, 6))
ax = sns.boxplot(data = train_class_df['PatientAge'], orient = 'h').set_title('Outliers in PatientAge')
print('Using pd.clip to set upper threshold of 100 for age and remove outliers'); print('--'*40)
# Cap ages at 100; the lower bound is left at the observed minimum (no lower clipping).
train_class_df['PatientAge'] = train_class_df['PatientAge'].clip(train_class_df['PatientAge'].min(), 100)
train_class_df['PatientAge'].describe().astype(int)
print('Get the distribution of `PatientAge` overall and where Target = 1'); print('--'*40)
fig = plt.figure(figsize = (10, 6))
ax = fig.add_subplot(121)
g = (sns.histplot(train_class_df['PatientAge'])
     .set_title('Distribution of PatientAge, Overall'))
ax = fig.add_subplot(122)
g = (sns.histplot(train_class_df.loc[train_class_df['Target'] == 1, 'PatientAge'])
     .set_title('Distribution of PatientAge, Pneumonia Evidence'))
print('Creating Age Binning field', '--'*40)
# Four equal-width bins over the (clipped) age range, labelled by their upper edge.
train_class_df['AgeBins'] = pd.cut(train_class_df['PatientAge'], bins = 4, precision = 0, labels = ['<=26', '<=50', '<=75', '<=100'])
train_class_df['AgeBins'].value_counts()
print('Value counts of the age bin field created'); print('--'*40)
display(pd.concat([train_class_df['AgeBins'].value_counts().sort_index().rename('Counts of Age Bins, Overall'),
                   train_class_df.loc[train_class_df['Target'] == 1, 'AgeBins'].value_counts().sort_index().rename('Counts of Age Bins, Target=1')], axis = 1))
print()
f, (ax1, ax2) = plt.subplots(1, 2, figsize = (10, 6))
g = sns.countplot(x = train_class_df['AgeBins'], ax = ax1).set_title('Count Plot of Age Bins, Overall')
g = sns.countplot(x = train_class_df.loc[train_class_df['Target'] == 1, 'AgeBins'], ax = ax2).set_title('Count Plot of Age Bins, Pneumonia Evidence')
plt.tight_layout()
print('Exploring the bounding boxes centers for `AgeBins` for random sample = 200')
# Creating a dataframe with columns for center of the rectangles.
# FIX: .copy() makes bboxes independent of train_class_df, so the column
# assignments below no longer raise pandas' SettingWithCopyWarning.
bboxes = train_class_df[train_class_df['Target'] == 1].copy()
bboxes['xw'] = bboxes['x'] + bboxes['width'] / 2
bboxes['yh'] = bboxes['y'] + bboxes['height'] / 2
# Compare box centers for the youngest vs. oldest age bins (200 random boxes each).
df1 = bboxes[bboxes['AgeBins'] == '<=26'].sample(200)
df2 = bboxes[bboxes['AgeBins'] == '<=100'].sample(200)
bboxes_scatter(df1, df2, '1 < AgeBins < 26 (Lower Bin)', '76 < AgeBins < 100 (Upper Bin)')
print('Checking distribution of age for those with Pneumonia Evidence, by Gender & Count Plot of Gender'); print('--'*40)
display(pd.concat([train_class_df['PatientSex'].value_counts(normalize = True).round(2).sort_values().rename('% Gender, Overall'),
                   train_class_df.loc[(train_class_df['Target'] == 1), 'PatientSex']
                   .value_counts(normalize = True).round(2).sort_index().rename('% Gender, Target=1')], axis = 1))
# 2x2 grid: age histograms by gender (top) and gender counts (bottom), Target == 1 vs. overall.
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, figsize = (10, 10))
g = sns.histplot(train_class_df.loc[(train_class_df['Target'] == 1) & (train_class_df['PatientSex'] == 'M'), 'PatientAge'], ax = ax1).set_title('Distribution of Age for Male, Pneumonia Evidence')
g = sns.histplot(train_class_df.loc[(train_class_df['Target'] == 1) & (train_class_df['PatientSex'] == 'F'), 'PatientAge'], ax = ax2).set_title('Distribution of Age for Female, Pneumonia Evidence')
g = sns.countplot(y = train_class_df['PatientSex'], ax = ax3, palette = 'PuOr').set_title('Count Plot of Gender, Overall')
g = sns.countplot(y = train_class_df.loc[(train_class_df['Target'] == 1), 'PatientSex'], ax = ax4, palette = 'PuOr').set_title('Count Plot of Gender, Pneumonia Evidence')
plt.tight_layout()
print('Exploring the bounding boxes centers for `PatientSex` for random sample = 1000')
df1 = bboxes[bboxes['PatientSex'] == 'M'].sample(1000)
df2 = bboxes[bboxes['PatientSex'] == 'F'].sample(1000)
bboxes_scatter(df1, df2, 'PatientSex = M', 'PatientSex = F')
# train_class_df.drop(['BodyPartExamined', 'Modality', 'AgeBins'], inplace = True, axis = 1)
# train_class_df.to_pickle('/content/drive/MyDrive/Capstone-Pneumonia/train_class_features.pkl')
# Reload the feature dataframe pickled on a previous run (from the parent of images_path).
train_class_df = pd.read_pickle(images_path + '/../train_class_features.pkl')
display(train_class_df.shape, train_class_df.head())
print('Checking sample for different classes')
# One example row per class (backticks needed because 'class' is a Python keyword in query()).
sample1 = train_class_df.query("`class` == 'Normal'").iloc[0]
sample2 = train_class_df.query("`class` == 'No Lung Opacity / Not Normal'").iloc[0]
sample3 = train_class_df.query("`class` == 'Lung Opacity'").iloc[1]
ds1 = dicom.dcmread(images_path + '/stage_2_train_images/' + sample1['path'].split('/')[-1])
ds2 = dicom.dcmread(images_path + '/stage_2_train_images/' + sample2['path'].split('/')[-1])
ds3 = dicom.dcmread(images_path + '/stage_2_train_images/' + sample3['path'].split('/')[-1])
f, ((ax1, ax2, ax3)) = plt.subplots(1, 3, figsize = (15, 8))
ax1.imshow(ds1.pixel_array, cmap = plt.cm.bone)
ax1.set_title('Class = Normal')
ax1.axis('off')
ax2.imshow(ds2.pixel_array, cmap = plt.cm.bone)
ax2.set_title('Class = No Lung Opacity / Not Normal')
ax2.axis('off')
ax3.imshow(ds3.pixel_array, cmap = plt.cm.bone)
ax3.set_title('Class = Lung Opacity')
ax3.axis('off')
plt.show()
# One example image per view position (AP vs. PA).
sample4 = train_class_df.loc[(train_class_df['ViewPosition'] == 'AP')].iloc[0]
sample5 = train_class_df.loc[(train_class_df['ViewPosition'] == 'PA')].iloc[0]
ds4 = dicom.dcmread(images_path + '/stage_2_train_images/' + sample4['path'].split('/')[-1])
ds5 = dicom.dcmread(images_path + '/stage_2_train_images/' + sample5['path'].split('/')[-1])
f, ((ax1, ax2)) = plt.subplots(1, 2, figsize = (15, 8))
ax1.imshow(ds4.pixel_array, cmap = plt.cm.bone)
ax1.set_title('View Position = AP')
ax1.axis('off')
ax2.imshow(ds5.pixel_array, cmap = plt.cm.bone)
ax2.set_title('View Position = PA')
ax2.axis('off')
plt.show()
# Helper function to plot the dicom images
def plot_dicom_images(data, df, img_path):
    """Plot a 3x3 grid of chest X-rays with DICOM metadata and bounding boxes.

    Parameters
    ----------
    data : pd.DataFrame
        The 9 rows to plot (needs patientId/Target/class/x/y/width/height).
    df : pd.DataFrame
        Full training dataframe; used to look up age and all boxes per patient.
    img_path : str
        Directory containing the .dcm files, named <patientId>.dcm.
    """
    img_data = list(data.T.to_dict().values())
    f, ax = plt.subplots(3, 3, figsize = (16, 18))
    for i, row in enumerate(img_data):
        image = row['patientId'] + '.dcm'
        path = os.path.join(img_path, image)
        # FIX: read the file once - the original read the same path twice
        # (deprecated read_file for the tags, then dcmread again for pixels).
        dcm = dicom.dcmread(path)
        rows = df[df['patientId'] == row['patientId']]
        age = rows.PatientAge.unique().tolist()[0]
        sex = dcm.PatientSex
        part = dcm.BodyPartExamined
        vp = dcm.ViewPosition
        modality = dcm.Modality
        axis = ax[i//3, i%3]
        axis.imshow(dcm.pixel_array, cmap = plt.cm.bone)
        axis.axis('off')
        axis.set_title('ID: {}\nAge: {}, Sex: {}, Part: {}, VP: {}, Modality: {}\nTarget: {}, Class: {}\nWindow: {}:{}:{}:{}'\
                       .format(row['patientId'], age, sex, part,
                               vp, modality, row['Target'],
                               row['class'], row['x'],
                               row['y'], row['width'],
                               row['height']))
        # Overlay every bounding box recorded for this patient; a distinct loop
        # variable avoids shadowing the outer `row`.
        box_data = list(rows.T.to_dict().values())
        for j, box in enumerate(box_data):
            axis.add_patch(Rectangle(xy = (box['x'], box['y']),
                                     width = box['width'], height = box['height'],
                                     color = 'red', alpha = 0.15))
    plt.show()
# this function is a part of custom module imported earlier (`eda`)
# Show 9 random pneumonia-positive (Target == 1) images with their metadata
# and bounding boxes overlaid.
plot_dicom_images(data = train_class_df.loc[(train_class_df['Target'] == 1)].sample(9),
                  df = train_class_df, img_path = TRAIN_IMG_DCM)
Overall the distribution is almost equal for `ViewPosition`, but where there is Pneumonia evidence, `ViewPosition` tends to be `AP`. AP: Anterior/Posterior, PA: Posterior/Anterior -------------------------------------------------------------------------------- Plot x and y centers of bounding boxes --------------------------------------------------------------------------------
<ipython-input-48-3894e3a9efb4>:25: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy bboxes['xw'] = bboxes['x'] + bboxes['width'] / 2 <ipython-input-48-3894e3a9efb4>:26: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy bboxes['yh'] = bboxes['y'] + bboxes['height'] / 2
Exploring the bounding boxes centers for `ViewPositions` for random sample = 1000
/usr/local/lib/python3.9/dist-packages/pandas/plotting/_matplotlib/core.py:1114: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored scatter = ax.scatter(
Checking outliers in `PatientAge -------------------------------------------------------------------------------- Minimum `PatientAge` in the training dataset: 1.0 Maximum `PatientAge` in the training dataset: 100.0 75th Percentile of `PatientAge` in the training dataset: 59.0 `PatientAge` in upper whisker for box plot: 84.0 Using pd.clip to set upper threshold of 100 for age and remove outliers -------------------------------------------------------------------------------- Get the distribution of `PatientAge` overall and where Target = 1 -------------------------------------------------------------------------------- Creating Age Binning field -------------------------------------------------------------------------------- Value counts of the age bin field created --------------------------------------------------------------------------------
| Counts of Age Bins, Overall | Counts of Age Bins, Target=1 | |
|---|---|---|
| <=26 | 3972 | 1478 |
| <=50 | 12157 | 3917 |
| <=75 | 13318 | 3895 |
| <=100 | 780 | 265 |
Exploring the bounding boxes centers for `AgeBins` for random sample = 200
<ipython-input-48-3894e3a9efb4>:102: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy bboxes['xw'] = bboxes['x'] + bboxes['width'] / 2 <ipython-input-48-3894e3a9efb4>:103: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy bboxes['yh'] = bboxes['y'] + bboxes['height'] / 2 /usr/local/lib/python3.9/dist-packages/pandas/plotting/_matplotlib/core.py:1114: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored scatter = ax.scatter(
Checking distribution of age for those with Pneumonia Evidence, by Gender & Count Plot of Gender --------------------------------------------------------------------------------
| % Gender, Overall | % Gender, Target=1 | |
|---|---|---|
| F | 0.43 | 0.42 |
| M | 0.57 | 0.58 |
Exploring the bounding boxes centers for `PatientSex` for random sample = 1000
/usr/local/lib/python3.9/dist-packages/pandas/plotting/_matplotlib/core.py:1114: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored scatter = ax.scatter(
(30227, 12)
| patientId | x | y | width | height | Target | class | class | path | PatientSex | PatientAge | ViewPosition | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0004cfab-14fd-4e49-80ba-63a80b6bddd6 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal | No Lung Opacity / Not Normal | /content/drive/MyDrive/Capstone-Pneumonia/10.C... | F | 51.0 | PA |
| 1 | 00313ee0-9eaa-42f4-b0ab-c148ed3241cd | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal | No Lung Opacity / Not Normal | /content/drive/MyDrive/Capstone-Pneumonia/10.C... | F | 48.0 | PA |
| 2 | 00322d4d-1c29-4943-afc9-b6754be640eb | 0.0 | 0.0 | 0.0 | 0.0 | 0 | No Lung Opacity / Not Normal | No Lung Opacity / Not Normal | /content/drive/MyDrive/Capstone-Pneumonia/10.C... | M | 19.0 | AP |
| 3 | 003d8fa0-6bf1-40ed-b54c-ac657f8495c5 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | Normal | Normal | /content/drive/MyDrive/Capstone-Pneumonia/10.C... | M | 28.0 | PA |
| 4 | 00436515-870c-4b36-a041-de91049b9ab4 | 264.0 | 152.0 | 213.0 | 379.0 | 1 | Lung Opacity | Lung Opacity | /content/drive/MyDrive/Capstone-Pneumonia/10.C... | F | 32.0 | AP |
Checking sample for different classes
#writing a helper function to get the box coordinates
def img_box_coordinates_from_idx(idx):
    """Return the box values (columns 1:5 — x, y, width, height) for the
    image at position *idx* in the global file list `fn`, looked up by
    patientId in the global DataFrame `df1A`."""
    patient_id = os.path.splitext(fn[idx])[0]
    matching = df1A[df1A['patientId'] == patient_id]
    return matching.iloc[0][1:5]
def img_box_coordinates(x, y, w, h):
    """Build polygon points and corner coordinates for one bounding box.

    Parameters: x, y — upper-left corner; w, h — width and height (pixels).
    Returns (points, coord): `points` holds one 4x2 ndarray of the corners
    (UL, BL, BR, UR) and `coord` holds one [UL, BR] pair. Both lists are
    empty when x is missing (no box for this image).
    """
    points = []
    coord = []
    # Bug fix: the original tested `x == np.NaN`, which is always False
    # because NaN never compares equal to anything (including itself), so
    # int(x) would raise on a missing box. pd.isna covers NaN and None.
    if pd.isna(x):
        return points, coord
    x1, y1 = int(x), int(y)          # upper left
    x2, y2 = int(x), int(y + h)      # bottom left
    x3, y3 = int(x + w), int(y + h)  # bottom right
    x4, y4 = int(x + w), int(y)      # upper right
    points.append(np.array([[x1, y1], [x2, y2], [x3, y3], [x4, y4]]))
    coord.append([[x1, y1], [x3, y3]])
    return points, coord
#take a random image
# Y_dash column 0 is the 'Lung Opacity' indicator; list candidate indices.
display(Y_dash[Y_dash[0] == 1].head())
img_num = 28 #pick any index in Y_dash with Lung Opacity as 1
img = X[img_num]
#get the box parameters x,y,w,h
lt = img_box_coordinates_from_idx(img_num)
# pt holds the [upper-left, bottom-right] corner pair for cv2.rectangle.
_, pt = img_box_coordinates(lt[0], lt[1], lt[2], lt[3])
#put a box on the image
factor = 256/1024 #doing this as the original res. was 1024x1024 and we resized to 256x256
# Scale both corners down to the resized image, then draw a white 2px box.
img = cv2.rectangle(img,
                    [int(a) for a in np.multiply(pt[0][0],factor)],
                    [int(b) for b in np.multiply(pt[0][1],factor)],
                    (255,255,255), 2)
cv2_imshow(img)
| 0 | 1 | 2 | |
|---|---|---|---|
| 10 | 1 | 0 | 0 |
| 21 | 1 | 0 | 0 |
| 24 | 1 | 0 | 0 |
| 28 | 1 | 0 | 0 |
| 29 | 1 | 0 | 0 |
#start with lenet-5CNN for basic Classification task - dont bother about bounding box here
# Sanity-check the train/test split shapes before building the model.
print("X train shape ", X_train.shape)
print("Y train shape ", Y_train.shape)
print("X test shape ", X_test.shape)
print("Y test shape ", Y_test.shape)
def basic_CNN():
    """Build an uncompiled LeNet-5 style CNN for 3-class classification of
    256x256 single-channel images: two Conv/AveragePool stages followed by
    two batch-normalized dense layers and a softmax head."""
    architecture = [
        Conv2D(filters = 6, kernel_size = (5, 5), input_shape = (256, 256, 1), activation = 'relu'),
        AveragePooling2D(),
        Conv2D(filters = 16, kernel_size = (5, 5), activation = 'relu'),
        AveragePooling2D(),
        Flatten(),
        Dense(units = 120, activation = 'relu'),
        BatchNormalization(),
        Dense(units = 84, activation = 'relu'),
        BatchNormalization(),
        Dense(units = 3, activation = 'softmax'),
    ]
    model = Sequential()
    for layer in architecture:
        model.add(layer)
    return model
# Build and compile the LeNet-style baseline model.
model = basic_CNN()
model.summary()
opt = optimizers.Adam(learning_rate=0.001)
# categorical_crossentropy matches the one-hot 3-class labels and softmax head.
model.compile(optimizer = opt, loss = 'categorical_crossentropy', metrics = ['accuracy'])
print("Model compiled!")
X train shape (21347, 256, 256, 1)
Y train shape (21347, 3)
X test shape (5337, 256, 256, 1)
Y test shape (5337, 3)
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 252, 252, 6) 156
average_pooling2d (AverageP (None, 126, 126, 6) 0
ooling2D)
conv2d_1 (Conv2D) (None, 122, 122, 16) 2416
average_pooling2d_1 (Averag (None, 61, 61, 16) 0
ePooling2D)
flatten_1 (Flatten) (None, 59536) 0
dense_2 (Dense) (None, 120) 7144440
batch_normalization_1 (Batc (None, 120) 480
hNormalization)
dense_3 (Dense) (None, 84) 10164
batch_normalization_2 (Batc (None, 84) 336
hNormalization)
dense_4 (Dense) (None, 3) 255
=================================================================
Total params: 7,158,247
Trainable params: 7,157,839
Non-trainable params: 408
_________________________________________________________________
Model compiled!
# Train for 30 epochs; the held-out test set doubles as validation data here.
history1 = model.fit(X_train, Y_train, batch_size=256, epochs=30, validation_data = (X_test,Y_test))
# Plot training vs. validation loss (left) and accuracy (right) per epoch.
fig, axis = plt.subplots(1,2, figsize=(15,5))
axis[0].plot(history1.history['loss'])
axis[0].plot(history1.history['val_loss'])
axis[0].set_title('model loss')
axis[0].set_ylabel('loss')
axis[0].set_xlabel('epoch')
axis[0].legend(['training', 'validation'], loc='best')
axis[0].set_ylim(0, 10)
axis[1].plot(history1.history['accuracy'])
axis[1].plot(history1.history['val_accuracy'])
axis[1].set_title('model accuracy')
axis[1].set_ylabel('accuracy')
axis[1].set_xlabel('epoch')
axis[1].legend(['training', 'validation'], loc='best')
axis[1].set_ylim(0, 1)
plt.show()
#pickle the file only once, if not retuning there after.
# pickle.dump(model, open(images_path + 'model_basic_CNN_lenet.pkl', 'wb'))
Epoch 1/30 84/84 [==============================] - 8s 35ms/step - loss: 0.9572 - accuracy: 0.5470 - val_loss: 1.3201 - val_accuracy: 0.5252 Epoch 2/30 84/84 [==============================] - 2s 27ms/step - loss: 0.8178 - accuracy: 0.6116 - val_loss: 1.1019 - val_accuracy: 0.5156 Epoch 3/30 84/84 [==============================] - 2s 28ms/step - loss: 0.7721 - accuracy: 0.6337 - val_loss: 1.8134 - val_accuracy: 0.4006 Epoch 4/30 84/84 [==============================] - 2s 27ms/step - loss: 0.7219 - accuracy: 0.6696 - val_loss: 1.0853 - val_accuracy: 0.5089 Epoch 5/30 84/84 [==============================] - 2s 27ms/step - loss: 0.6408 - accuracy: 0.7173 - val_loss: 1.4052 - val_accuracy: 0.5052 Epoch 6/30 84/84 [==============================] - 2s 27ms/step - loss: 0.5493 - accuracy: 0.7647 - val_loss: 1.1785 - val_accuracy: 0.5095 Epoch 7/30 84/84 [==============================] - 2s 27ms/step - loss: 0.4267 - accuracy: 0.8285 - val_loss: 1.0886 - val_accuracy: 0.5473 Epoch 8/30 84/84 [==============================] - 2s 29ms/step - loss: 0.2928 - accuracy: 0.8964 - val_loss: 1.0310 - val_accuracy: 0.5479 Epoch 9/30 84/84 [==============================] - 2s 28ms/step - loss: 0.1739 - accuracy: 0.9523 - val_loss: 1.3435 - val_accuracy: 0.5274 Epoch 10/30 84/84 [==============================] - 2s 27ms/step - loss: 0.0950 - accuracy: 0.9810 - val_loss: 1.4126 - val_accuracy: 0.5591 Epoch 11/30 84/84 [==============================] - 2s 27ms/step - loss: 0.0420 - accuracy: 0.9966 - val_loss: 1.5662 - val_accuracy: 0.5644 Epoch 12/30 84/84 [==============================] - 2s 28ms/step - loss: 0.0186 - accuracy: 0.9997 - val_loss: 1.4883 - val_accuracy: 0.5707 Epoch 13/30 84/84 [==============================] - 2s 27ms/step - loss: 0.0103 - accuracy: 1.0000 - val_loss: 1.6799 - val_accuracy: 0.5823 Epoch 14/30 84/84 [==============================] - 2s 27ms/step - loss: 0.0065 - accuracy: 1.0000 - val_loss: 1.7386 - val_accuracy: 0.5719 Epoch 15/30 84/84 
[==============================] - 2s 27ms/step - loss: 0.0045 - accuracy: 1.0000 - val_loss: 1.8158 - val_accuracy: 0.5814 Epoch 16/30 84/84 [==============================] - 2s 27ms/step - loss: 0.0035 - accuracy: 1.0000 - val_loss: 1.8649 - val_accuracy: 0.5752 Epoch 17/30 84/84 [==============================] - 2s 28ms/step - loss: 0.0028 - accuracy: 1.0000 - val_loss: 1.9303 - val_accuracy: 0.5769 Epoch 18/30 84/84 [==============================] - 2s 28ms/step - loss: 0.0023 - accuracy: 1.0000 - val_loss: 1.9442 - val_accuracy: 0.5780 Epoch 19/30 84/84 [==============================] - 2s 27ms/step - loss: 0.0018 - accuracy: 1.0000 - val_loss: 1.9827 - val_accuracy: 0.5752 Epoch 20/30 84/84 [==============================] - 2s 27ms/step - loss: 0.0017 - accuracy: 1.0000 - val_loss: 2.0026 - val_accuracy: 0.5780 Epoch 21/30 84/84 [==============================] - 2s 28ms/step - loss: 0.0016 - accuracy: 1.0000 - val_loss: 1.9902 - val_accuracy: 0.5741 Epoch 22/30 84/84 [==============================] - 2s 27ms/step - loss: 0.0012 - accuracy: 1.0000 - val_loss: 2.0563 - val_accuracy: 0.5747 Epoch 23/30 84/84 [==============================] - 2s 28ms/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 2.0761 - val_accuracy: 0.5769 Epoch 24/30 84/84 [==============================] - 2s 27ms/step - loss: 9.2335e-04 - accuracy: 1.0000 - val_loss: 2.0976 - val_accuracy: 0.5765 Epoch 25/30 84/84 [==============================] - 2s 28ms/step - loss: 8.3767e-04 - accuracy: 1.0000 - val_loss: 2.0959 - val_accuracy: 0.5777 Epoch 26/30 84/84 [==============================] - 2s 28ms/step - loss: 7.6694e-04 - accuracy: 1.0000 - val_loss: 2.1121 - val_accuracy: 0.5743 Epoch 27/30 84/84 [==============================] - 2s 28ms/step - loss: 6.9993e-04 - accuracy: 1.0000 - val_loss: 2.1513 - val_accuracy: 0.5726 Epoch 28/30 84/84 [==============================] - 2s 28ms/step - loss: 6.1010e-04 - accuracy: 1.0000 - val_loss: 2.1407 - val_accuracy: 0.5758 Epoch 
29/30 84/84 [==============================] - 2s 27ms/step - loss: 5.5900e-04 - accuracy: 1.0000 - val_loss: 2.1840 - val_accuracy: 0.5765 Epoch 30/30 84/84 [==============================] - 2s 28ms/step - loss: 5.1344e-04 - accuracy: 1.0000 - val_loss: 2.2004 - val_accuracy: 0.5790
#metrics for the training data
# NOTE(review): pickling a Keras model is fragile across versions; prefer
# model.save()/load_model when this is revisited.
pickled_model = pickle.load(open(images_path + 'model_basic_CNN_lenet.pkl', 'rb'))
y_pred = pickled_model.predict(X_train)
# Convert per-class probabilities to predicted class indices in one shot.
y_pred_final = np.argmax(y_pred, axis = 1)
Y_train_true = Y_train.idxmax(axis=1)
#This is for all the training data (includes the validation data during the training)
print(classification_report(Y_train_true, y_pred_final))
cm = confusion_matrix(Y_train_true, y_pred_final)
plt.figure(figsize=(10,7))
sns.heatmap(cm, annot=True, fmt='d')
# Bug fix: sklearn's confusion_matrix puts true labels on the rows (heatmap
# y-axis) and predictions on the columns (x-axis); the original labels were
# swapped.
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
#get the accuracy no.s (diagonal / row sums = per-class recall)
train_acc = cm.diagonal()/cm.sum(axis=1)
train_acc_overall = accuracy_score(Y_train_true, y_pred_final)
#metrics for the test data -> this data we have not seen yet.
y_pred_test = pickled_model.predict(X_test)
#index 0 is 'Lung Opacity', 1 is 'Normal' or 'No Pnemonia'
y_pred_test_final = np.argmax(y_pred_test, axis = 1)
Y_test_true = Y_test.idxmax(axis=1)
print(classification_report(Y_test_true, y_pred_test_final))
cm = confusion_matrix(Y_test_true, y_pred_test_final)
plt.figure(figsize=(10,7))
sns.heatmap(cm, annot=True, fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
#get the accuracy no.s
test_acc = cm.diagonal()/cm.sum(axis=1)
test_acc_overall = accuracy_score(Y_test_true, y_pred_test_final)
#Put the model and Accuracy figures for test and train data in a dataframe
perf_dict = {'Model':'CNN Lenet'}
perf_dict['Tr_Acc Overall '] = train_acc_overall
for i in range(len(train_acc)):
    perf_dict['Tr_Acc ' + list(le.classes_)[i]] = train_acc[i]
perf_dict['Ts_Acc Overall '] = test_acc_overall
for i in range(len(test_acc)):
    perf_dict['Ts_Acc ' + list(le.classes_)[i]] = test_acc[i]
df_results = pd.DataFrame(perf_dict, index=[0])
df_results.to_excel(images_path + 'df_results.xlsx', index=False)
Keras model archive loading:
File Name Modified Size
metadata.json 2023-03-13 11:46:50 64
config.json 2023-03-13 11:46:50 5061
variables.h5 2023-03-13 11:46:50 85943320
Keras weights file (<HDF5 file "variables.h5" (mode r)>) loading:
...layers
......average_pooling2d
.........vars
......average_pooling2d_1
.........vars
......batch_normalization
.........vars
............0
............1
............2
............3
......batch_normalization_1
.........vars
............0
............1
............2
............3
......conv2d
.........vars
............0
............1
......conv2d_1
.........vars
............0
............1
......dense
.........vars
............0
............1
......dense_1
.........vars
............0
............1
......dense_2
.........vars
............0
............1
......flatten
.........vars
...metrics
......mean
.........vars
............0
............1
......mean_metric_wrapper
.........vars
............0
............1
...optimizer
......vars
.........0
.........1
.........10
.........11
.........12
.........13
.........14
.........15
.........16
.........17
.........18
.........19
.........2
.........20
.........21
.........22
.........23
.........24
.........25
.........26
.........27
.........28
.........3
.........4
.........5
.........6
.........7
.........8
.........9
...vars
668/668 [==============================] - 11s 5ms/step
precision recall f1-score support
0 1.00 1.00 1.00 4814
1 1.00 1.00 1.00 9455
2 1.00 1.00 1.00 7078
accuracy 1.00 21347
macro avg 1.00 1.00 1.00 21347
weighted avg 1.00 1.00 1.00 21347
167/167 [==============================] - 1s 5ms/step
precision recall f1-score support
0 0.47 0.41 0.44 1198
1 0.56 0.59 0.57 2366
2 0.66 0.69 0.67 1773
accuracy 0.58 5337
macro avg 0.57 0.56 0.56 5337
weighted avg 0.57 0.58 0.58 5337
# datagen = ImageDataGenerator(
# featurewise_center=False, # set input mean to 0 over the dataset
# samplewise_center=False, # set each sample mean to 0
# featurewise_std_normalization=False, # divide inputs by std of the dataset
# samplewise_std_normalization=False, # divide each input by its std
# zca_whitening=False, # apply ZCA whitening
# rotation_range=10, # randomly rotate images in the range (degrees, 0 to 180)
# zoom_range = 0.1, # Randomly zoom image
# width_shift_range=0.1, # randomly shift images horizontally (fraction of total width)
# height_shift_range=0.1, # randomly shift images vertically (fraction of total height)
# horizontal_flip=True, # randomly flip images
# vertical_flip=False) # randomly flip images
# datagen.fit(X_train)
# print("Shape of training set ", X_train.shape)
def tuned_basic_CNN():
    """Build an uncompiled, regularized CNN for 3-class classification of
    256x256 single-channel images: four Conv -> BatchNorm -> MaxPool blocks
    with growing filter counts, then two dense blocks with dropout and a
    softmax head. All trainable layers use He initialization and (except the
    output layer) L2 weight regularization."""
    model = Sequential()
    # Convolutional feature extractor.
    for stage, n_filters in enumerate((8, 32, 128, 256)):
        if stage == 0:
            # Only the first layer declares the input shape.
            model.add(Conv2D(filters = n_filters, kernel_size = (3, 3),
                             input_shape = (256, 256, 1), activation = 'relu',
                             kernel_initializer = 'he_normal', kernel_regularizer = 'l2'))
        else:
            model.add(Conv2D(filters = n_filters, kernel_size = (3, 3),
                             activation = 'relu',
                             kernel_initializer = 'he_normal', kernel_regularizer = 'l2'))
        model.add(BatchNormalization())
        model.add(MaxPooling2D())
    # Dense classifier head.
    model.add(Flatten())
    for n_units in (256, 32):
        model.add(Dense(units = n_units, activation = 'relu',
                        kernel_initializer = 'he_normal', kernel_regularizer = 'l2'))
        model.add(BatchNormalization())
        model.add(Dropout(0.4))
    model.add(Dense(units = 3, activation = 'softmax', kernel_initializer = 'he_normal'))
    return model
# Build and compile the tuned model; lower learning rate than the baseline.
model_tuned = tuned_basic_CNN()
model_tuned.summary()
opt = optimizers.Adam(learning_rate=0.0001)
model_tuned.compile(optimizer = opt, loss = 'categorical_crossentropy', metrics = ['accuracy'])
# history2 = model_tuned.fit_generator(datagen.flow(X_train,Y_train, batch_size=256),
#                                      epochs = 30, validation_data = (X_test,Y_test),
#                                      verbose = 1, callbacks=[earlystop])
history2 = model_tuned.fit(X_train, Y_train, batch_size=256, epochs=30, validation_data = (X_test,Y_test))
# Loss / accuracy curves for the tuned model.
fig, axis = plt.subplots(1,2, figsize=(15,5))
axis[0].plot(history2.history['loss'])
axis[0].plot(history2.history['val_loss'])
axis[0].set_title('model loss')
axis[0].set_ylabel('loss')
axis[0].set_xlabel('epoch')
axis[0].legend(['training', 'validation'], loc='best')
axis[0].set_ylim(0, 10)
axis[1].plot(history2.history['accuracy'])
axis[1].plot(history2.history['val_accuracy'])
axis[1].set_title('model accuracy')
axis[1].set_ylabel('accuracy')
axis[1].set_xlabel('epoch')
axis[1].legend(['training', 'validation'], loc='best')
axis[1].set_ylim(0, 1)
plt.show()
#pickle the model if only doing inferences after that.
# pickle.dump(model_tuned, open(images_path + 'model_tuned_CNN.pkl', 'wb'))
# Experiment log (figures read as train accuracy, validation accuracy):
#added one more CNN (64) with maxpooling -> 73% 61% NS
#all maxpooling, CNN(128) added -> 88%, 60% NS
#CNN 6/16/64/128 -> 6/30/90/150, FC is 128/16/3 -> 256/64/3; -> 96%, 58%
#regul. l1 -> 60%, 59%.. NS
#reverted to l2, increased lr to 0.001 -> 71%, 62%. NS
#expanded CNN's
#32,64,128 and 32,8,3 with batch-norm, dropout and adam-0.001, 98%accuracy but validation is 51%
#add kernel initialization, validation improved to ~60%.
#add 256 CNN and 128 Dense -> 98.4% train accuracy, 60% validation accuracy
#target is now only 0/1.. dense is 256/64/16/2..16M parms to train. got 99.4% train and upto 78-80% test accuracy
#added 512 CNN.. reduced trainable parms to 10M.. 99.3%, 77% accuracy
#added l2 regularization.. 82%, 74%
#with l1.. 79%, 67%
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_203 (Conv2D) (None, 254, 254, 8) 80
batch_normalization_204 (Ba (None, 254, 254, 8) 32
tchNormalization)
max_pooling2d_4 (MaxPooling (None, 127, 127, 8) 0
2D)
conv2d_204 (Conv2D) (None, 125, 125, 32) 2336
batch_normalization_205 (Ba (None, 125, 125, 32) 128
tchNormalization)
max_pooling2d_5 (MaxPooling (None, 62, 62, 32) 0
2D)
conv2d_205 (Conv2D) (None, 60, 60, 128) 36992
batch_normalization_206 (Ba (None, 60, 60, 128) 512
tchNormalization)
max_pooling2d_6 (MaxPooling (None, 30, 30, 128) 0
2D)
conv2d_206 (Conv2D) (None, 28, 28, 256) 295168
batch_normalization_207 (Ba (None, 28, 28, 256) 1024
tchNormalization)
max_pooling2d_7 (MaxPooling (None, 14, 14, 256) 0
2D)
flatten_1 (Flatten) (None, 50176) 0
dense_2 (Dense) (None, 256) 12845312
batch_normalization_208 (Ba (None, 256) 1024
tchNormalization)
dropout_1 (Dropout) (None, 256) 0
dense_3 (Dense) (None, 32) 8224
batch_normalization_209 (Ba (None, 32) 128
tchNormalization)
dropout_2 (Dropout) (None, 32) 0
dense_4 (Dense) (None, 3) 99
=================================================================
Total params: 13,191,059
Trainable params: 13,189,635
Non-trainable params: 1,424
_________________________________________________________________
Epoch 1/30
84/84 [==============================] - 17s 120ms/step - loss: 14.4676 - accuracy: 0.4827 - val_loss: 13.3074 - val_accuracy: 0.4697
Epoch 2/30
84/84 [==============================] - 9s 105ms/step - loss: 12.1343 - accuracy: 0.5390 - val_loss: 11.0616 - val_accuracy: 0.5458
Epoch 3/30
84/84 [==============================] - 9s 105ms/step - loss: 10.2953 - accuracy: 0.5587 - val_loss: 9.4260 - val_accuracy: 0.5898
Epoch 4/30
84/84 [==============================] - 9s 105ms/step - loss: 8.8602 - accuracy: 0.5772 - val_loss: 8.1851 - val_accuracy: 0.5934
Epoch 5/30
84/84 [==============================] - 9s 105ms/step - loss: 7.7033 - accuracy: 0.5987 - val_loss: 7.1823 - val_accuracy: 0.5955
Epoch 6/30
84/84 [==============================] - 9s 105ms/step - loss: 6.7939 - accuracy: 0.6148 - val_loss: 6.4120 - val_accuracy: 0.5861
Epoch 7/30
84/84 [==============================] - 9s 105ms/step - loss: 6.0551 - accuracy: 0.6325 - val_loss: 5.7581 - val_accuracy: 0.6150
Epoch 8/30
84/84 [==============================] - 9s 105ms/step - loss: 5.4481 - accuracy: 0.6560 - val_loss: 5.2784 - val_accuracy: 0.6045
Epoch 9/30
84/84 [==============================] - 9s 105ms/step - loss: 4.9679 - accuracy: 0.6803 - val_loss: 4.9198 - val_accuracy: 0.6031
Epoch 10/30
84/84 [==============================] - 9s 105ms/step - loss: 4.5908 - accuracy: 0.7002 - val_loss: 4.5626 - val_accuracy: 0.6221
Epoch 11/30
84/84 [==============================] - 9s 105ms/step - loss: 4.2488 - accuracy: 0.7306 - val_loss: 4.3427 - val_accuracy: 0.6099
Epoch 12/30
84/84 [==============================] - 9s 105ms/step - loss: 3.9404 - accuracy: 0.7638 - val_loss: 4.1494 - val_accuracy: 0.6000
Epoch 13/30
84/84 [==============================] - 9s 105ms/step - loss: 3.6658 - accuracy: 0.8042 - val_loss: 3.9455 - val_accuracy: 0.6206
Epoch 14/30
84/84 [==============================] - 9s 105ms/step - loss: 3.4232 - accuracy: 0.8369 - val_loss: 3.8050 - val_accuracy: 0.6031
Epoch 15/30
84/84 [==============================] - 9s 105ms/step - loss: 3.2145 - accuracy: 0.8653 - val_loss: 3.6891 - val_accuracy: 0.6041
Epoch 16/30
84/84 [==============================] - 9s 105ms/step - loss: 3.0105 - accuracy: 0.8959 - val_loss: 3.6840 - val_accuracy: 0.5631
Epoch 17/30
84/84 [==============================] - 9s 105ms/step - loss: 2.8321 - accuracy: 0.9147 - val_loss: 3.5640 - val_accuracy: 0.5709
Epoch 18/30
84/84 [==============================] - 9s 105ms/step - loss: 2.6694 - accuracy: 0.9330 - val_loss: 3.5142 - val_accuracy: 0.5745
Epoch 19/30
84/84 [==============================] - 9s 105ms/step - loss: 2.5281 - accuracy: 0.9442 - val_loss: 3.3845 - val_accuracy: 0.5814
Epoch 20/30
84/84 [==============================] - 9s 105ms/step - loss: 2.4106 - accuracy: 0.9498 - val_loss: 3.2979 - val_accuracy: 0.5822
Epoch 21/30
84/84 [==============================] - 9s 105ms/step - loss: 2.2929 - accuracy: 0.9581 - val_loss: 3.3082 - val_accuracy: 0.5750
Epoch 22/30
84/84 [==============================] - 9s 105ms/step - loss: 2.2075 - accuracy: 0.9584 - val_loss: 3.2113 - val_accuracy: 0.5868
Epoch 23/30
84/84 [==============================] - 9s 105ms/step - loss: 2.1208 - accuracy: 0.9635 - val_loss: 3.2130 - val_accuracy: 0.5717
Epoch 24/30
84/84 [==============================] - 9s 105ms/step - loss: 2.0356 - accuracy: 0.9661 - val_loss: 3.3144 - val_accuracy: 0.5645
Epoch 25/30
84/84 [==============================] - 9s 105ms/step - loss: 1.9484 - accuracy: 0.9716 - val_loss: 3.0922 - val_accuracy: 0.5923
Epoch 26/30
84/84 [==============================] - 9s 105ms/step - loss: 1.8925 - accuracy: 0.9677 - val_loss: 3.0783 - val_accuracy: 0.5934
Epoch 27/30
84/84 [==============================] - 9s 105ms/step - loss: 1.8466 - accuracy: 0.9678 - val_loss: 3.2326 - val_accuracy: 0.5799
Epoch 28/30
84/84 [==============================] - 9s 105ms/step - loss: 1.8013 - accuracy: 0.9677 - val_loss: 3.0789 - val_accuracy: 0.5767
Epoch 29/30
84/84 [==============================] - 9s 105ms/step - loss: 1.7323 - accuracy: 0.9754 - val_loss: 3.1788 - val_accuracy: 0.5782
Epoch 30/30
84/84 [==============================] - 9s 105ms/step - loss: 1.6876 - accuracy: 0.9711 - val_loss: 3.1201 - val_accuracy: 0.5812
#Results from tuned model
pickled_model_tuned_CNN = pickle.load(open(images_path + 'model_tuned_CNN.pkl', 'rb'))
y_pred = pickled_model_tuned_CNN.predict(X_train)
# Convert per-class probabilities to predicted class indices in one shot.
y_pred_final = np.argmax(y_pred, axis = 1)
Y_train_true = Y_train.idxmax(axis=1)
#This is for all the training data (includes the validation data during the training)
# Bug fix: classification_report expects (y_true, y_pred); the original
# passed the predictions first, which silently swaps precision and recall
# (and disagrees with the baseline metrics cell, which had the right order).
print(classification_report(Y_train_true, y_pred_final))
cm = confusion_matrix(Y_train_true, y_pred_final)
plt.figure(figsize=(10,7))
sns.heatmap(cm, annot=True, fmt='d')
# confusion_matrix rows are the true labels (heatmap y-axis) and columns
# the predictions (x-axis) — the original labels were swapped.
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
# Per-class recall from the diagonal, plus overall accuracy.
train_acc = cm.diagonal()/cm.sum(axis=1)
train_acc_overall = accuracy_score(Y_train_true, y_pred_final)
#metrics for the test data -> this data we have not seen yet.
y_pred_test = pickled_model_tuned_CNN.predict(X_test)
#index 0 is 'Lung Opacity', 1 is 'Normal' or 'No Pnemonia'
y_pred_test_final = np.argmax(y_pred_test, axis = 1)
Y_test_true = Y_test.idxmax(axis=1)
print(classification_report(Y_test_true, y_pred_test_final))
cm = confusion_matrix(Y_test_true, y_pred_test_final)
plt.figure(figsize=(10,7))
sns.heatmap(cm, annot=True, fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
#get the accuracy no.s
test_acc = cm.diagonal()/cm.sum(axis=1)
test_acc_overall = accuracy_score(Y_test_true, y_pred_test_final)
#Put the model and Accuracy figures for test and train data in a dataframe
perf_dict = {'Model':'CNN Tuned'}
perf_dict['Tr_Acc Overall '] = train_acc_overall
for i in range(len(train_acc)):
    perf_dict['Tr_Acc ' + list(le.classes_)[i]] = train_acc[i]
perf_dict['Ts_Acc Overall '] = test_acc_overall
for i in range(len(test_acc)):
    perf_dict['Ts_Acc ' + list(le.classes_)[i]] = test_acc[i]
# Append this model's row to the running results workbook.
df_results = pd.read_excel(images_path + 'df_results.xlsx')
df_results.loc[len(df_results.index)] = list(perf_dict.values())
df_results.to_excel(images_path + 'df_results.xlsx', index=False)
Keras model archive loading:
File Name Modified Size
metadata.json 2023-03-13 15:34:34 64
config.json 2023-03-13 15:34:34 9430
variables.h5 2023-03-13 15:34:36 158362920
Keras weights file (<HDF5 file "variables.h5" (mode r)>) loading:
...layers
......batch_normalization
.........vars
............0
............1
............2
............3
......batch_normalization_1
.........vars
............0
............1
............2
............3
......batch_normalization_2
.........vars
............0
............1
............2
............3
......batch_normalization_3
.........vars
............0
............1
............2
............3
......batch_normalization_4
.........vars
............0
............1
............2
............3
......batch_normalization_5
.........vars
............0
............1
............2
............3
......conv2d
.........vars
............0
............1
......conv2d_1
.........vars
............0
............1
......conv2d_2
.........vars
............0
............1
......conv2d_3
.........vars
............0
............1
......dense
.........vars
............0
............1
......dense_1
.........vars
............0
............1
......dense_2
.........vars
............0
............1
......dropout
.........vars
......dropout_1
.........vars
......flatten
.........vars
......max_pooling2d
.........vars
......max_pooling2d_1
.........vars
......max_pooling2d_2
.........vars
......max_pooling2d_3
.........vars
...metrics
......mean
.........vars
............0
............1
......mean_metric_wrapper
.........vars
............0
............1
...optimizer
......vars
.........0
.........1
.........10
.........11
.........12
.........13
.........14
.........15
.........16
.........17
.........18
.........19
.........2
.........20
.........21
.........22
.........23
.........24
.........25
.........26
.........27
.........28
.........29
.........3
.........30
.........31
.........32
.........33
.........34
.........35
.........36
.........37
.........38
.........39
.........4
.........40
.........41
.........42
.........43
.........44
.........45
.........46
.........47
.........48
.........49
.........5
.........50
.........51
.........52
.........6
.........7
.........8
.........9
...vars
668/668 [==============================] - 7s 10ms/step
precision recall f1-score support
0 0.94 1.00 0.97 4550
1 1.00 0.96 0.98 9883
2 0.97 1.00 0.99 6914
accuracy 0.98 21347
macro avg 0.97 0.98 0.98 21347
weighted avg 0.98 0.98 0.98 21347
167/167 [==============================] - 2s 12ms/step
precision recall f1-score support
0 0.28 0.50 0.36 668
1 0.73 0.54 0.62 3180
2 0.59 0.70 0.64 1489
accuracy 0.58 5337
macro avg 0.53 0.58 0.54 5337
weighted avg 0.63 0.58 0.59 5337
# Transfer learning - Resnet
from tensorflow.keras.applications.resnet50 import preprocess_input
# FIX: ResNet50 and Flatten are used below but were never imported anywhere in
# this notebook (the top-of-file imports bring in MobileNetV2/VGG19/VGG16 and a
# layer list without Flatten), so this cell raised NameError.
from tensorflow.keras.applications import ResNet50
from tensorflow.keras.layers import Flatten

# Apply ResNet's own channel-wise preprocessing to the RGB image tensors.
X_train_rs = preprocess_input(X_train_rgb)
X_test_rs = preprocess_input(X_test_rgb)


def CNN_with_resnet():
    """Build a 3-class softmax classifier on a frozen ImageNet ResNet50 base.

    Returns:
        An uncompiled keras Sequential model.
    """
    model = Sequential()
    base_model = ResNet50(include_top=False, weights='imagenet',
                          input_shape=X_train_rs[0].shape)
    base_model.trainable = False  ## Not trainable weights
    model.add(base_model)
    model.add(Flatten())
    model.add(Dense(units=64, activation='relu',
                    kernel_initializer='he_normal', kernel_regularizer='l2'))
    model.add(BatchNormalization())
    model.add(Dropout(0.4))
    model.add(Dense(units=3, activation='softmax', kernel_initializer='he_normal'))
    return model
# Build, train, and persist the ResNet50 transfer-learning model.
model_CNN_resnet = CNN_with_resnet()
model_CNN_resnet.summary()

opt = optimizers.Adam(learning_rate=0.0001)
model_CNN_resnet.compile(optimizer=opt, loss='categorical_crossentropy',
                         metrics=['accuracy'])
history3 = model_CNN_resnet.fit(X_train_rs, Y_train, batch_size=256, epochs=30,
                                validation_data=(X_test_rs, Y_test))

# Side-by-side curves: loss on the left, accuracy on the right.
fig, axis = plt.subplots(1, 2, figsize=(15, 5))
for panel, metric, y_top in ((axis[0], 'loss', 10), (axis[1], 'accuracy', 1)):
    panel.plot(history3.history[metric])
    panel.plot(history3.history['val_' + metric])
    panel.set_title('model ' + metric)
    panel.set_ylabel(metric)
    panel.set_xlabel('epoch')
    panel.legend(['training', 'validation'], loc='best')
    panel.set_ylim(0, y_top)
plt.show()

#pickle it only once.. once saved, use the model for inferences
pickle.dump(model_CNN_resnet, open(images_path + 'model_TL_Resnet50.pkl', 'wb'))
from tensorflow.keras.models import save_model, load_model
save_model(model_CNN_resnet, images_path + "model_TL_Resnet50.h5")
# resnet -> 97%, 59%
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/resnet/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5
94765736/94765736 [==============================] - 0s 0us/step
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
resnet50 (Functional) (None, 8, 8, 2048) 23587712
flatten (Flatten) (None, 131072) 0
dense (Dense) (None, 64) 8388672
batch_normalization (BatchN (None, 64) 256
ormalization)
dropout (Dropout) (None, 64) 0
dense_1 (Dense) (None, 3) 195
=================================================================
Total params: 31,976,835
Trainable params: 8,388,995
Non-trainable params: 23,587,840
_________________________________________________________________
Epoch 1/30
84/84 [==============================] - 45s 333ms/step - loss: 2.0399 - accuracy: 0.6086 - val_loss: 1.8702 - val_accuracy: 0.6153
Epoch 2/30
84/84 [==============================] - 16s 189ms/step - loss: 1.5350 - accuracy: 0.6798 - val_loss: 1.4219 - val_accuracy: 0.6871
Epoch 3/30
84/84 [==============================] - 16s 187ms/step - loss: 1.2634 - accuracy: 0.7241 - val_loss: 1.3077 - val_accuracy: 0.6758
Epoch 4/30
84/84 [==============================] - 16s 189ms/step - loss: 1.0743 - accuracy: 0.7669 - val_loss: 1.2933 - val_accuracy: 0.6639
Epoch 5/30
84/84 [==============================] - 16s 188ms/step - loss: 0.9124 - accuracy: 0.8122 - val_loss: 1.3281 - val_accuracy: 0.6063
Epoch 6/30
84/84 [==============================] - 16s 190ms/step - loss: 0.7825 - accuracy: 0.8541 - val_loss: 1.1993 - val_accuracy: 0.6580
Epoch 7/30
84/84 [==============================] - 16s 189ms/step - loss: 0.6820 - accuracy: 0.8825 - val_loss: 1.1814 - val_accuracy: 0.6624
Epoch 8/30
84/84 [==============================] - 16s 188ms/step - loss: 0.6052 - accuracy: 0.9042 - val_loss: 1.2463 - val_accuracy: 0.6470
Epoch 9/30
84/84 [==============================] - 16s 188ms/step - loss: 0.5287 - accuracy: 0.9293 - val_loss: 1.1912 - val_accuracy: 0.6453
Epoch 10/30
84/84 [==============================] - 16s 188ms/step - loss: 0.4696 - accuracy: 0.9430 - val_loss: 1.3124 - val_accuracy: 0.6365
Epoch 11/30
84/84 [==============================] - 16s 188ms/step - loss: 0.4324 - accuracy: 0.9496 - val_loss: 1.2193 - val_accuracy: 0.6196
Epoch 12/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3966 - accuracy: 0.9591 - val_loss: 1.2642 - val_accuracy: 0.6361
Epoch 13/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3805 - accuracy: 0.9606 - val_loss: 1.2605 - val_accuracy: 0.6416
Epoch 14/30
84/84 [==============================] - 16s 189ms/step - loss: 0.3528 - accuracy: 0.9633 - val_loss: 1.3222 - val_accuracy: 0.6277
Epoch 15/30
84/84 [==============================] - 16s 187ms/step - loss: 0.3484 - accuracy: 0.9604 - val_loss: 1.3284 - val_accuracy: 0.6412
Epoch 16/30
84/84 [==============================] - 16s 187ms/step - loss: 0.3408 - accuracy: 0.9618 - val_loss: 1.3028 - val_accuracy: 0.6253
Epoch 17/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3412 - accuracy: 0.9581 - val_loss: 1.3272 - val_accuracy: 0.6457
Epoch 18/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3409 - accuracy: 0.9595 - val_loss: 1.3521 - val_accuracy: 0.6170
Epoch 19/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3230 - accuracy: 0.9639 - val_loss: 1.2845 - val_accuracy: 0.6507
Epoch 20/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3228 - accuracy: 0.9612 - val_loss: 1.4780 - val_accuracy: 0.6492
Epoch 21/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3077 - accuracy: 0.9652 - val_loss: 1.3170 - val_accuracy: 0.6436
Epoch 22/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3009 - accuracy: 0.9643 - val_loss: 1.3566 - val_accuracy: 0.6129
Epoch 23/30
84/84 [==============================] - 16s 188ms/step - loss: 0.3046 - accuracy: 0.9624 - val_loss: 1.3607 - val_accuracy: 0.6298
Epoch 24/30
84/84 [==============================] - 16s 189ms/step - loss: 0.2881 - accuracy: 0.9676 - val_loss: 1.4102 - val_accuracy: 0.6155
Epoch 25/30
84/84 [==============================] - 16s 188ms/step - loss: 0.2842 - accuracy: 0.9681 - val_loss: 1.3888 - val_accuracy: 0.6371
Epoch 26/30
84/84 [==============================] - 16s 190ms/step - loss: 0.2858 - accuracy: 0.9674 - val_loss: 1.5090 - val_accuracy: 0.6326
Epoch 27/30
84/84 [==============================] - 16s 188ms/step - loss: 0.2910 - accuracy: 0.9623 - val_loss: 1.4398 - val_accuracy: 0.6146
Epoch 28/30
84/84 [==============================] - 16s 189ms/step - loss: 0.2922 - accuracy: 0.9614 - val_loss: 1.5311 - val_accuracy: 0.6504
Epoch 29/30
84/84 [==============================] - 16s 188ms/step - loss: 0.2902 - accuracy: 0.9650 - val_loss: 1.3921 - val_accuracy: 0.6416
Epoch 30/30
84/84 [==============================] - 16s 188ms/step - loss: 0.2711 - accuracy: 0.9694 - val_loss: 1.3921 - val_accuracy: 0.6251
from tensorflow.keras.models import save_model, load_model
save_model(model_CNN_resnet, images_path + "model_TL_Resnet50.h5")
pickled_model_resnet = load_model(images_path + 'model_TL_Resnet50.h5')

#Resnet - train data
from tensorflow.keras.applications.resnet50 import preprocess_input
#load data if using pickle
X_train_rs = preprocess_input(X_train_rgb)
X_test_rs = preprocess_input(X_test_rgb)
# FIX: the original pickle.load here was immediately overwritten by load_model
# on the next line — dead, version-fragile work — so it was removed; the
# supported keras load_model path is kept.
pickled_model_resnet = load_model(images_path + 'model_TL_Resnet50.h5')

# Predicted class index per sample = argmax over the softmax probabilities.
y_pred = pickled_model_resnet.predict(X_train_rs)
y_pred_final = [np.argmax(p) for p in y_pred]
Y_train_true = Y_train.idxmax(axis=1)

#This is for all the training data (includes the validation data during the training)
# FIX: classification_report expects (y_true, y_pred) — the swapped arguments
# exchanged the precision and recall columns.
print(classification_report(Y_train_true, y_pred_final))
cm = confusion_matrix(Y_train_true, y_pred_final)
plt.figure(figsize=(10, 7))
sns.heatmap(cm, annot=True, fmt='d')
# FIX: rows of confusion_matrix(y_true, y_pred) are the truth -> y axis;
# columns are the predictions -> x axis (labels were reversed before).
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
#get the accuracy no.s (per-class recall + overall accuracy)
train_acc = cm.diagonal() / cm.sum(axis=1)
train_acc_overall = accuracy_score(Y_train_true, y_pred_final)

#metrics for the test data -> this data we have not seen yet.
y_pred_test = pickled_model_resnet.predict(X_test_rs)
#index 0 is 'Lung Opacity', 1 is 'Normal' or 'No Pneumonia'
y_pred_test_final = [np.argmax(p) for p in y_pred_test]
Y_test_true = Y_test.idxmax(axis=1)
print(classification_report(Y_test_true, y_pred_test_final))
cm = confusion_matrix(Y_test_true, y_pred_test_final)
plt.figure(figsize=(10, 7))
sns.heatmap(cm, annot=True, fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
#get the accuracy no.s
test_acc = cm.diagonal() / cm.sum(axis=1)
test_acc_overall = accuracy_score(Y_test_true, y_pred_test_final)

#Put the model and Accuracy figures for test and train data in a dataframe
perf_dict = {'Model': 'TL Resnet50'}
perf_dict['Tr_Acc Overall '] = train_acc_overall
for i in range(len(train_acc)):
    perf_dict['Tr_Acc ' + list(le.classes_)[i]] = train_acc[i]
perf_dict['Ts_Acc Overall '] = test_acc_overall
for i in range(len(test_acc)):
    perf_dict['Ts_Acc ' + list(le.classes_)[i]] = test_acc[i]
df_results = pd.read_excel(images_path + 'df_results.xlsx')
df_results.loc[len(df_results.index)] = list(perf_dict.values())
df_results.to_excel(images_path + 'df_results.xlsx', index=False)
668/668 [==============================] - 16s 22ms/step
precision recall f1-score support
0 1.00 0.98 0.99 4891
1 0.99 1.00 0.99 9340
2 1.00 0.99 1.00 7116
accuracy 0.99 21347
macro avg 0.99 0.99 0.99 21347
weighted avg 0.99 0.99 0.99 21347
167/167 [==============================] - 4s 24ms/step
precision recall f1-score support
0 0.54 0.51 0.52 1276
1 0.55 0.62 0.58 2091
2 0.79 0.71 0.74 1970
accuracy 0.63 5337
macro avg 0.62 0.61 0.62 5337
weighted avg 0.63 0.63 0.63 5337
#Transfer learning - Vggnet
from tensorflow.keras.applications.vgg16 import preprocess_input
# FIX: Flatten is used below but never imported in this notebook (the
# top-of-file layer imports do not include it), so this cell raised NameError.
from tensorflow.keras.layers import Flatten

# Apply VGG's channel-wise preprocessing to the RGB image tensors.
X_train_vgg = preprocess_input(X_train_rgb)
X_test_vgg = preprocess_input(X_test_rgb)


def CNN_with_Vggnet():
    """Build a 3-class softmax classifier on a frozen ImageNet VGG16 base.

    Returns:
        An uncompiled keras Sequential model.
    """
    model = Sequential()
    base_model = VGG16(weights="imagenet", include_top=False,
                       input_shape=X_train_vgg[0].shape)
    base_model.trainable = False  ## Not trainable weights
    model.add(base_model)
    model.add(Flatten())
    model.add(Dense(units=64, activation='relu',
                    kernel_initializer='he_normal', kernel_regularizer='l2'))
    model.add(BatchNormalization())
    model.add(Dropout(0.4))
    model.add(Dense(units=3, activation='softmax', kernel_initializer='he_normal'))
    return model
# Build and train the VGG16 transfer-learning model, then plot its curves.
model_CNN_vggnet = CNN_with_Vggnet()
model_CNN_vggnet.summary()

opt = optimizers.Adam(learning_rate=0.0001)
model_CNN_vggnet.compile(optimizer=opt, loss='categorical_crossentropy',
                         metrics=['accuracy'])
# earlystop= EarlyStopping(monitor='val_loss', patience=3)
history4 = model_CNN_vggnet.fit(X_train_vgg, Y_train, batch_size=256, epochs=30,
                                validation_data=(X_test_vgg, Y_test))

# Side-by-side curves: loss on the left, accuracy on the right.
fig, axis = plt.subplots(1, 2, figsize=(15, 5))
for panel, metric, y_top in ((axis[0], 'loss', 10), (axis[1], 'accuracy', 1)):
    panel.plot(history4.history[metric])
    panel.plot(history4.history['val_' + metric])
    panel.set_title('model ' + metric)
    panel.set_ylabel(metric)
    panel.set_xlabel('epoch')
    panel.legend(['training', 'validation'], loc='best')
    panel.set_ylim(0, y_top)
plt.show()

#pickle only once, if you only want to make inferences after that
# pickle.dump(model_CNN_vggnet, open(images_path + 'model_TL_Vgg16.pkl', 'wb'))
#VGG - 96%, 62%
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
vgg16 (Functional) (None, 8, 8, 512) 14714688
flatten (Flatten) (None, 32768) 0
dense (Dense) (None, 64) 2097216
batch_normalization (BatchN (None, 64) 256
ormalization)
dropout (Dropout) (None, 64) 0
dense_1 (Dense) (None, 3) 195
=================================================================
Total params: 16,812,355
Trainable params: 2,097,539
Non-trainable params: 14,714,816
_________________________________________________________________
Epoch 1/30
84/84 [==============================] - 44s 371ms/step - loss: 2.3270 - accuracy: 0.5596 - val_loss: 1.9764 - val_accuracy: 0.6472
Epoch 2/30
84/84 [==============================] - 19s 227ms/step - loss: 1.8373 - accuracy: 0.6501 - val_loss: 1.7531 - val_accuracy: 0.6397
Epoch 3/30
84/84 [==============================] - 19s 228ms/step - loss: 1.5914 - accuracy: 0.6958 - val_loss: 1.5776 - val_accuracy: 0.6631
Epoch 4/30
84/84 [==============================] - 19s 227ms/step - loss: 1.4046 - accuracy: 0.7316 - val_loss: 1.5394 - val_accuracy: 0.6610
Epoch 5/30
84/84 [==============================] - 19s 227ms/step - loss: 1.2466 - accuracy: 0.7616 - val_loss: 1.4690 - val_accuracy: 0.6397
Epoch 6/30
84/84 [==============================] - 19s 227ms/step - loss: 1.1213 - accuracy: 0.7963 - val_loss: 1.4396 - val_accuracy: 0.6500
Epoch 7/30
84/84 [==============================] - 19s 228ms/step - loss: 1.0002 - accuracy: 0.8280 - val_loss: 1.3906 - val_accuracy: 0.6580
Epoch 8/30
84/84 [==============================] - 19s 227ms/step - loss: 0.9004 - accuracy: 0.8528 - val_loss: 1.3796 - val_accuracy: 0.6605
Epoch 9/30
84/84 [==============================] - 19s 229ms/step - loss: 0.8173 - accuracy: 0.8758 - val_loss: 1.3338 - val_accuracy: 0.6436
Epoch 10/30
84/84 [==============================] - 19s 228ms/step - loss: 0.7397 - accuracy: 0.9012 - val_loss: 1.3261 - val_accuracy: 0.6644
Epoch 11/30
84/84 [==============================] - 19s 227ms/step - loss: 0.6771 - accuracy: 0.9141 - val_loss: 1.3198 - val_accuracy: 0.6631
Epoch 12/30
84/84 [==============================] - 19s 228ms/step - loss: 0.6271 - accuracy: 0.9253 - val_loss: 1.2971 - val_accuracy: 0.6528
Epoch 13/30
84/84 [==============================] - 19s 228ms/step - loss: 0.5754 - accuracy: 0.9375 - val_loss: 1.2898 - val_accuracy: 0.6526
Epoch 14/30
84/84 [==============================] - 19s 228ms/step - loss: 0.5405 - accuracy: 0.9430 - val_loss: 1.3250 - val_accuracy: 0.6395
Epoch 15/30
84/84 [==============================] - 19s 228ms/step - loss: 0.5086 - accuracy: 0.9489 - val_loss: 1.3128 - val_accuracy: 0.6479
Epoch 16/30
84/84 [==============================] - 19s 227ms/step - loss: 0.4720 - accuracy: 0.9558 - val_loss: 1.3296 - val_accuracy: 0.6358
Epoch 17/30
84/84 [==============================] - 19s 226ms/step - loss: 0.4511 - accuracy: 0.9588 - val_loss: 1.3048 - val_accuracy: 0.6447
Epoch 18/30
84/84 [==============================] - 19s 228ms/step - loss: 0.4288 - accuracy: 0.9593 - val_loss: 1.3316 - val_accuracy: 0.6408
Epoch 19/30
84/84 [==============================] - 19s 226ms/step - loss: 0.4164 - accuracy: 0.9597 - val_loss: 1.3754 - val_accuracy: 0.6453
Epoch 20/30
84/84 [==============================] - 19s 227ms/step - loss: 0.4023 - accuracy: 0.9620 - val_loss: 1.3693 - val_accuracy: 0.6500
Epoch 21/30
84/84 [==============================] - 19s 228ms/step - loss: 0.3873 - accuracy: 0.9622 - val_loss: 1.2798 - val_accuracy: 0.6444
Epoch 22/30
84/84 [==============================] - 19s 227ms/step - loss: 0.3709 - accuracy: 0.9656 - val_loss: 1.3605 - val_accuracy: 0.6483
Epoch 23/30
84/84 [==============================] - 19s 229ms/step - loss: 0.3616 - accuracy: 0.9644 - val_loss: 1.3412 - val_accuracy: 0.6461
Epoch 24/30
84/84 [==============================] - 19s 228ms/step - loss: 0.3563 - accuracy: 0.9650 - val_loss: 1.4109 - val_accuracy: 0.6476
Epoch 25/30
84/84 [==============================] - 19s 227ms/step - loss: 0.3487 - accuracy: 0.9642 - val_loss: 1.2988 - val_accuracy: 0.6487
Epoch 26/30
84/84 [==============================] - 19s 227ms/step - loss: 0.3524 - accuracy: 0.9595 - val_loss: 1.3047 - val_accuracy: 0.6536
Epoch 27/30
84/84 [==============================] - 19s 227ms/step - loss: 0.3401 - accuracy: 0.9624 - val_loss: 1.2984 - val_accuracy: 0.6393
Epoch 28/30
84/84 [==============================] - 19s 227ms/step - loss: 0.3508 - accuracy: 0.9558 - val_loss: 1.3192 - val_accuracy: 0.6477
Epoch 29/30
84/84 [==============================] - 19s 226ms/step - loss: 0.3492 - accuracy: 0.9553 - val_loss: 1.5430 - val_accuracy: 0.6446
Epoch 30/30
84/84 [==============================] - 19s 227ms/step - loss: 0.3365 - accuracy: 0.9596 - val_loss: 1.4062 - val_accuracy: 0.6202
Keras weights file (<HDF5 file "variables.h5" (mode r+)>) saving: ...layers ......batch_normalization .........vars ............0 ............1 ............2 ............3 ......dense .........vars ............0 ............1 ......dense_1 .........vars ............0 ............1 ......dropout .........vars ......flatten .........vars ......functional .........layers ............conv2d ...............vars ..................0 ..................1 ............conv2d_1 ...............vars ..................0 ..................1 ............conv2d_10 ...............vars ..................0 ..................1 ............conv2d_11 ...............vars ..................0 ..................1 ............conv2d_12 ...............vars ..................0 ..................1 ............conv2d_2 ...............vars ..................0 ..................1 ............conv2d_3 ...............vars ..................0 ..................1 ............conv2d_4 ...............vars ..................0 ..................1 ............conv2d_5 ...............vars ..................0 ..................1 ............conv2d_6 ...............vars ..................0 ..................1 ............conv2d_7 ...............vars ..................0 ..................1 ............conv2d_8 ...............vars ..................0 ..................1 ............conv2d_9 ...............vars ..................0 ..................1 ............input_layer ...............vars ............max_pooling2d ...............vars ............max_pooling2d_1 ...............vars ............max_pooling2d_2 ...............vars ............max_pooling2d_3 ...............vars ............max_pooling2d_4 ...............vars .........vars ...metrics ......mean .........vars ............0 ............1 ......mean_metric_wrapper .........vars ............0 ............1 ...optimizer ......vars .........0 .........1 .........10 .........11 .........12 .........2 .........3 .........4 .........5 .........6 
.........7 .........8 .........9 ...vars Keras model archive saving: File Name Modified Size metadata.json 2023-03-13 10:28:29 64 config.json 2023-03-13 10:28:29 12709 variables.h5 2023-03-13 10:28:29 84105728
#VGG - train data
from tensorflow.keras.applications.vgg16 import preprocess_input
# NOTE(review): unpickling a Keras model is fragile across TF/Keras versions;
# prefer save_model/load_model. Kept because only the .pkl artifact is
# referenced for the VGG model — confirm an .h5 copy exists before switching.
pickled_model_vgg16 = pickle.load(open(images_path + 'model_TL_Vgg16.pkl', 'rb'))
X_train_vgg = preprocess_input(X_train_rgb)
X_test_vgg = preprocess_input(X_test_rgb)

# Predicted class index per sample = argmax over the softmax probabilities.
y_pred = pickled_model_vgg16.predict(X_train_vgg)
y_pred_final = [np.argmax(p) for p in y_pred]
Y_train_true = Y_train.idxmax(axis=1)

#This is for all the training data (includes the validation data during the training)
# FIX: classification_report expects (y_true, y_pred) — the swapped arguments
# exchanged the precision and recall columns.
print(classification_report(Y_train_true, y_pred_final))
cm = confusion_matrix(Y_train_true, y_pred_final)
plt.figure(figsize=(10, 7))
sns.heatmap(cm, annot=True, fmt='d')
# FIX: rows of confusion_matrix(y_true, y_pred) are the truth -> y axis;
# columns are the predictions -> x axis (labels were reversed before).
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
#get the accuracy no.s (per-class recall + overall accuracy)
train_acc = cm.diagonal() / cm.sum(axis=1)
train_acc_overall = accuracy_score(Y_train_true, y_pred_final)

#metrics for the test data -> this data we have not seen yet.
y_pred_test = pickled_model_vgg16.predict(X_test_vgg)
#index 0 is 'Lung Opacity', 1 is 'Normal' or 'No Pneumonia'
y_pred_test_final = [np.argmax(p) for p in y_pred_test]
Y_test_true = Y_test.idxmax(axis=1)
print(classification_report(Y_test_true, y_pred_test_final))
cm = confusion_matrix(Y_test_true, y_pred_test_final)
plt.figure(figsize=(10, 7))
sns.heatmap(cm, annot=True, fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
#get the accuracy no.s
test_acc = cm.diagonal() / cm.sum(axis=1)
test_acc_overall = accuracy_score(Y_test_true, y_pred_test_final)

#Put the model and Accuracy figures for test and train data in a dataframe
perf_dict = {'Model': 'TL Vgg16'}
perf_dict['Tr_Acc Overall '] = train_acc_overall
for i in range(len(train_acc)):
    perf_dict['Tr_Acc ' + list(le.classes_)[i]] = train_acc[i]
perf_dict['Ts_Acc Overall '] = test_acc_overall
for i in range(len(test_acc)):
    perf_dict['Ts_Acc ' + list(le.classes_)[i]] = test_acc[i]
df_results = pd.read_excel(images_path + 'df_results.xlsx')
df_results.loc[len(df_results.index)] = list(perf_dict.values())
df_results.to_excel(images_path + 'df_results.xlsx', index=False)
Keras model archive loading:
File Name Modified Size
metadata.json 2023-03-13 10:28:28 64
config.json 2023-03-13 10:28:28 12709
variables.h5 2023-03-13 10:28:28 84105728
Keras weights file (<HDF5 file "variables.h5" (mode r)>) loading:
...layers
......batch_normalization
.........vars
............0
............1
............2
............3
......dense
.........vars
............0
............1
......dense_1
.........vars
............0
............1
......dropout
.........vars
......flatten
.........vars
......functional
.........layers
............conv2d
...............vars
..................0
..................1
............conv2d_1
...............vars
..................0
..................1
............conv2d_10
...............vars
..................0
..................1
............conv2d_11
...............vars
..................0
..................1
............conv2d_12
...............vars
..................0
..................1
............conv2d_2
...............vars
..................0
..................1
............conv2d_3
...............vars
..................0
..................1
............conv2d_4
...............vars
..................0
..................1
............conv2d_5
...............vars
..................0
..................1
............conv2d_6
...............vars
..................0
..................1
............conv2d_7
...............vars
..................0
..................1
............conv2d_8
...............vars
..................0
..................1
............conv2d_9
...............vars
..................0
..................1
............input_layer
...............vars
............max_pooling2d
...............vars
............max_pooling2d_1
...............vars
............max_pooling2d_2
...............vars
............max_pooling2d_3
...............vars
............max_pooling2d_4
...............vars
.........vars
...metrics
......mean
.........vars
............0
............1
......mean_metric_wrapper
.........vars
............0
............1
...optimizer
......vars
.........0
.........1
.........10
.........11
.........12
.........2
.........3
.........4
.........5
.........6
.........7
.........8
.........9
...vars
668/668 [==============================] - 23s 22ms/step
precision recall f1-score support
0 1.00 0.95 0.97 5064
1 0.99 0.99 0.99 9455
2 0.96 1.00 0.98 6828
accuracy 0.98 21347
macro avg 0.98 0.98 0.98 21347
weighted avg 0.98 0.98 0.98 21347
167/167 [==============================] - 4s 25ms/step
precision recall f1-score support
0 0.59 0.45 0.51 1561
1 0.60 0.62 0.61 2288
2 0.67 0.80 0.73 1488
accuracy 0.62 5337
macro avg 0.62 0.62 0.62 5337
weighted avg 0.62 0.62 0.61 5337
#Transfer learning - InceptionResNetV2
from tensorflow.keras.applications.inception_resnet_v2 import preprocess_input
# FIX: InceptionResNetV2 and Flatten are used below but never imported in this
# notebook, so this cell raised NameError.
from tensorflow.keras.applications import InceptionResNetV2
from tensorflow.keras.layers import Flatten

# Apply the InceptionResNetV2 preprocessing to the RGB image tensors.
X_train_incepv2 = preprocess_input(X_train_rgb)
X_test_incepv2 = preprocess_input(X_test_rgb)


def CNN_with_InceptionV2():
    """Build a 3-class softmax classifier on a frozen ImageNet InceptionResNetV2 base.

    Returns:
        An uncompiled keras Sequential model.
    """
    model = Sequential()
    base_model = InceptionResNetV2(weights='imagenet', include_top=False,
                                   input_shape=X_train_incepv2[0].shape)
    base_model.trainable = False  ## Not trainable weights
    # for layer in base_model.layers:
    # layer.trainable = False
    model.add(base_model)
    model.add(Flatten())
    model.add(Dense(units=64, activation='relu',
                    kernel_initializer='he_normal', kernel_regularizer='l2'))
    model.add(BatchNormalization())
    model.add(Dropout(0.4))
    model.add(Dense(units=3, activation='softmax', kernel_initializer='he_normal'))
    return model
# Build and train the InceptionResNetV2 transfer-learning model, then plot curves.
model_CNN_InceptionV2 = CNN_with_InceptionV2()
model_CNN_InceptionV2.summary()

opt = optimizers.Adam(learning_rate=0.0001)
model_CNN_InceptionV2.compile(optimizer=opt, loss='categorical_crossentropy',
                              metrics=['accuracy'])
history5 = model_CNN_InceptionV2.fit(X_train_incepv2, Y_train, batch_size=256,
                                     epochs=30,
                                     validation_data=(X_test_incepv2, Y_test))

# Side-by-side curves: loss on the left, accuracy on the right.
fig, axis = plt.subplots(1, 2, figsize=(15, 5))
for panel, metric, y_top in ((axis[0], 'loss', 10), (axis[1], 'accuracy', 1)):
    panel.plot(history5.history[metric])
    panel.plot(history5.history['val_' + metric])
    panel.set_title('model ' + metric)
    panel.set_ylabel(metric)
    panel.set_xlabel('epoch')
    panel.legend(['training', 'validation'], loc='best')
    panel.set_ylim(0, y_top)
plt.show()

#pickle only once, if you only want to make inferences after that
# pickle.dump(model_CNN_InceptionV2, open(images_path + 'model_TL_InceptionV2.pkl', 'wb'))
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/inception_resnet_v2/inception_resnet_v2_weights_tf_dim_ordering_tf_kernels_notop.h5
219055592/219055592 [==============================] - 1s 0us/step
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
inception_resnet_v2 (Functi (None, 6, 6, 1536) 54336736
onal)
flatten (Flatten) (None, 55296) 0
dense (Dense) (None, 64) 3539008
batch_normalization_203 (Ba (None, 64) 256
tchNormalization)
dropout (Dropout) (None, 64) 0
dense_1 (Dense) (None, 3) 195
=================================================================
Total params: 57,876,195
Trainable params: 3,539,331
Non-trainable params: 54,336,864
_________________________________________________________________
Epoch 1/30
84/84 [==============================] - 52s 417ms/step - loss: 2.1727 - accuracy: 0.5736 - val_loss: 2.2085 - val_accuracy: 0.5467
Epoch 2/30
84/84 [==============================] - 24s 289ms/step - loss: 1.7865 - accuracy: 0.6283 - val_loss: 1.5918 - val_accuracy: 0.6672
Epoch 3/30
84/84 [==============================] - 24s 290ms/step - loss: 1.5459 - accuracy: 0.6599 - val_loss: 1.4785 - val_accuracy: 0.6389
Epoch 4/30
84/84 [==============================] - 24s 290ms/step - loss: 1.3656 - accuracy: 0.6849 - val_loss: 1.3719 - val_accuracy: 0.6299
Epoch 5/30
84/84 [==============================] - 24s 290ms/step - loss: 1.2196 - accuracy: 0.7038 - val_loss: 1.2487 - val_accuracy: 0.6513
Epoch 6/30
84/84 [==============================] - 24s 289ms/step - loss: 1.1033 - accuracy: 0.7216 - val_loss: 1.2068 - val_accuracy: 0.6539
Epoch 7/30
84/84 [==============================] - 24s 289ms/step - loss: 1.0033 - accuracy: 0.7426 - val_loss: 1.1641 - val_accuracy: 0.6507
Epoch 8/30
84/84 [==============================] - 24s 288ms/step - loss: 0.9219 - accuracy: 0.7601 - val_loss: 1.1191 - val_accuracy: 0.6577
Epoch 9/30
84/84 [==============================] - 24s 289ms/step - loss: 0.8602 - accuracy: 0.7704 - val_loss: 1.0769 - val_accuracy: 0.6594
Epoch 10/30
84/84 [==============================] - 24s 288ms/step - loss: 0.7962 - accuracy: 0.7938 - val_loss: 1.2120 - val_accuracy: 0.6110
Epoch 11/30
84/84 [==============================] - 24s 289ms/step - loss: 0.7385 - accuracy: 0.8096 - val_loss: 1.0899 - val_accuracy: 0.6509
Epoch 12/30
84/84 [==============================] - 24s 292ms/step - loss: 0.7006 - accuracy: 0.8171 - val_loss: 1.5578 - val_accuracy: 0.5205
Epoch 13/30
84/84 [==============================] - 24s 290ms/step - loss: 0.6648 - accuracy: 0.8298 - val_loss: 1.4786 - val_accuracy: 0.5265
Epoch 14/30
84/84 [==============================] - 24s 289ms/step - loss: 0.6372 - accuracy: 0.8348 - val_loss: 1.3019 - val_accuracy: 0.5794
Epoch 15/30
84/84 [==============================] - 24s 288ms/step - loss: 0.6047 - accuracy: 0.8466 - val_loss: 1.3924 - val_accuracy: 0.5460
Epoch 16/30
84/84 [==============================] - 24s 289ms/step - loss: 0.5683 - accuracy: 0.8610 - val_loss: 1.3667 - val_accuracy: 0.5949
Epoch 17/30
84/84 [==============================] - 24s 290ms/step - loss: 0.5409 - accuracy: 0.8662 - val_loss: 1.1200 - val_accuracy: 0.6101
Epoch 18/30
84/84 [==============================] - 24s 289ms/step - loss: 0.5189 - accuracy: 0.8757 - val_loss: 1.4145 - val_accuracy: 0.5108
Epoch 19/30
84/84 [==============================] - 24s 288ms/step - loss: 0.4933 - accuracy: 0.8863 - val_loss: 1.1580 - val_accuracy: 0.6416
Epoch 20/30
84/84 [==============================] - 24s 289ms/step - loss: 0.4713 - accuracy: 0.8962 - val_loss: 1.4897 - val_accuracy: 0.5642
Epoch 21/30
84/84 [==============================] - 24s 289ms/step - loss: 0.4643 - accuracy: 0.8974 - val_loss: 1.3076 - val_accuracy: 0.6254
Epoch 22/30
84/84 [==============================] - 24s 289ms/step - loss: 0.4389 - accuracy: 0.9053 - val_loss: 1.4943 - val_accuracy: 0.5820
Epoch 23/30
84/84 [==============================] - 24s 288ms/step - loss: 0.4380 - accuracy: 0.9027 - val_loss: 1.2508 - val_accuracy: 0.6206
Epoch 24/30
84/84 [==============================] - 24s 290ms/step - loss: 0.4264 - accuracy: 0.9112 - val_loss: 1.3210 - val_accuracy: 0.5720
Epoch 25/30
84/84 [==============================] - 24s 290ms/step - loss: 0.4044 - accuracy: 0.9173 - val_loss: 1.9195 - val_accuracy: 0.4956
Epoch 26/30
84/84 [==============================] - 24s 290ms/step - loss: 0.3932 - accuracy: 0.9208 - val_loss: 1.3410 - val_accuracy: 0.6269
Epoch 27/30
84/84 [==============================] - 24s 289ms/step - loss: 0.3845 - accuracy: 0.9231 - val_loss: 1.2108 - val_accuracy: 0.6314
Epoch 28/30
84/84 [==============================] - 24s 289ms/step - loss: 0.3830 - accuracy: 0.9215 - val_loss: 1.4650 - val_accuracy: 0.6209
Epoch 29/30
84/84 [==============================] - 24s 289ms/step - loss: 0.3614 - accuracy: 0.9294 - val_loss: 1.2703 - val_accuracy: 0.6307
Epoch 30/30
84/84 [==============================] - 24s 289ms/step - loss: 0.3497 - accuracy: 0.9352 - val_loss: 1.3072 - val_accuracy: 0.5930
Keras weights file (<HDF5 file "variables.h5" (mode r+)>) saving: ...layers ......batch_normalization .........vars ............0 ............1 ............2 ............3 ......dense .........vars ............0 ............1 ......dense_1 .........vars ............0 ............1 ......dropout .........vars ......flatten .........vars ......functional .........layers ............activation ...............vars ............activation_1 ...............vars ............activation_10 ...............vars ............activation_100 ...............vars ............activation_101 ...............vars ............activation_102 ...............vars ............activation_103 ...............vars ............activation_104 ...............vars ............activation_105 ...............vars ............activation_106 ...............vars ............activation_107 ...............vars ............activation_108 ...............vars ............activation_109 ...............vars ............activation_11 ...............vars ............activation_110 ...............vars ............activation_111 ...............vars ............activation_112 ...............vars ............activation_113 ...............vars ............activation_114 ...............vars ............activation_115 ...............vars ............activation_116 ...............vars ............activation_117 ...............vars ............activation_118 ...............vars ............activation_119 ...............vars ............activation_12 ...............vars ............activation_120 ...............vars ............activation_121 ...............vars ............activation_122 ...............vars ............activation_123 ...............vars ............activation_124 ...............vars ............activation_125 ...............vars ............activation_126 ...............vars ............activation_127 ...............vars ............activation_128 ...............vars ............activation_129 
...............vars ............activation_13 ...............vars ............activation_130 ...............vars ............activation_131 ...............vars ............activation_132 ...............vars ............activation_133 ...............vars ............activation_134 ...............vars ............activation_135 ...............vars ............activation_136 ...............vars ............activation_137 ...............vars ............activation_138 ...............vars ............activation_139 ...............vars ............activation_14 ...............vars ............activation_140 ...............vars ............activation_141 ...............vars ............activation_142 ...............vars ............activation_143 ...............vars ............activation_144 ...............vars ............activation_145 ...............vars ............activation_146 ...............vars ............activation_147 ...............vars ............activation_148 ...............vars ............activation_149 ...............vars ............activation_15 ...............vars ............activation_150 ...............vars ............activation_151 ...............vars ............activation_152 ...............vars ............activation_153 ...............vars ............activation_154 ...............vars ............activation_155 ...............vars ............activation_156 ...............vars ............activation_157 ...............vars ............activation_158 ...............vars ............activation_159 ...............vars ............activation_16 ...............vars ............activation_160 ...............vars ............activation_161 ...............vars ............activation_162 ...............vars ............activation_163 ...............vars ............activation_164 ...............vars ............activation_165 ...............vars ............activation_166 ...............vars ............activation_167 ...............vars 
............activation_168 ...............vars ............activation_169 ...............vars ............activation_17 ...............vars ............activation_170 ...............vars ............activation_171 ...............vars ............activation_172 ...............vars ............activation_173 ...............vars ............activation_174 ...............vars ............activation_175 ...............vars ............activation_176 ...............vars ............activation_177 ...............vars ............activation_178 ...............vars ............activation_179 ...............vars ............activation_18 ...............vars ............activation_180 ...............vars ............activation_181 ...............vars ............activation_182 ...............vars ............activation_183 ...............vars ............activation_184 ...............vars ............activation_185 ...............vars ............activation_186 ...............vars ............activation_187 ...............vars ............activation_188 ...............vars ............activation_189 ...............vars ............activation_19 ...............vars ............activation_190 ...............vars ............activation_191 ...............vars ............activation_192 ...............vars ............activation_193 ...............vars ............activation_194 ...............vars ............activation_195 ...............vars ............activation_196 ...............vars ............activation_197 ...............vars ............activation_198 ...............vars ............activation_199 ...............vars ............activation_2 ...............vars ............activation_20 ...............vars ............activation_200 ...............vars ............activation_201 ...............vars ............activation_202 ...............vars ............activation_203 ...............vars ............activation_204 ...............vars ............activation_205 
...............vars ............activation_206 ...............vars ............activation_207 ...............vars ............activation_208 ...............vars ............activation_209 ...............vars ............activation_21 ...............vars ............activation_210 ...............vars ............activation_211 ...............vars ............activation_212 ...............vars ............activation_213 ...............vars ............activation_214 ...............vars ............activation_215 ...............vars ............activation_216 ...............vars ............activation_217 ...............vars ............activation_218 ...............vars ............activation_219 ...............vars ............activation_22 ...............vars ............activation_220 ...............vars ............activation_221 ...............vars ............activation_222 ...............vars ............activation_223 ...............vars ............activation_224 ...............vars ............activation_225 ...............vars ............activation_226 ...............vars ............activation_227 ...............vars ............activation_228 ...............vars ............activation_229 ...............vars ............activation_23 ...............vars ............activation_230 ...............vars ............activation_231 ...............vars ............activation_232 ...............vars ............activation_233 ...............vars ............activation_234 ...............vars ............activation_235 ...............vars ............activation_236 ...............vars ............activation_237 ...............vars ............activation_238 ...............vars ............activation_239 ...............vars ............activation_24 ...............vars ............activation_240 ...............vars ............activation_241 ...............vars ............activation_242 ...............vars ............activation_25 ...............vars 
............activation_26 ...............vars ............activation_27 ...............vars ............activation_28 ...............vars ............activation_29 ...............vars ............activation_3 ...............vars ............activation_30 ...............vars ............activation_31 ...............vars ............activation_32 ...............vars ............activation_33 ...............vars ............activation_34 ...............vars ............activation_35 ...............vars ............activation_36 ...............vars ............activation_37 ...............vars ............activation_38 ...............vars ............activation_39 ...............vars ............activation_4 ...............vars ............activation_40 ...............vars ............activation_41 ...............vars ............activation_42 ...............vars ............activation_43 ...............vars ............activation_44 ...............vars ............activation_45 ...............vars ............activation_46 ...............vars ............activation_47 ...............vars ............activation_48 ...............vars ............activation_49 ...............vars ............activation_5 ...............vars ............activation_50 ...............vars ............activation_51 ...............vars ............activation_52 ...............vars ............activation_53 ...............vars ............activation_54 ...............vars ............activation_55 ...............vars ............activation_56 ...............vars ............activation_57 ...............vars ............activation_58 ...............vars ............activation_59 ...............vars ............activation_6 ...............vars ............activation_60 ...............vars ............activation_61 ...............vars ............activation_62 ...............vars ............activation_63 ...............vars ............activation_64 ...............vars ............activation_65 
...............vars ............activation_66 ...............vars ............activation_67 ...............vars ............activation_68 ...............vars ............activation_69 ...............vars ............activation_7 ...............vars ............activation_70 ...............vars ............activation_71 ...............vars ............activation_72 ...............vars ............activation_73 ...............vars ............activation_74 ...............vars ............activation_75 ...............vars ............activation_76 ...............vars ............activation_77 ...............vars ............activation_78 ...............vars ............activation_79 ...............vars ............activation_8 ...............vars ............activation_80 ...............vars ............activation_81 ...............vars ............activation_82 ...............vars ............activation_83 ...............vars ............activation_84 ...............vars ............activation_85 ...............vars ............activation_86 ...............vars ............activation_87 ...............vars ............activation_88 ...............vars ............activation_89 ...............vars ............activation_9 ...............vars ............activation_90 ...............vars ............activation_91 ...............vars ............activation_92 ...............vars ............activation_93 ...............vars ............activation_94 ...............vars ............activation_95 ...............vars ............activation_96 ...............vars ............activation_97 ...............vars ............activation_98 ...............vars ............activation_99 ...............vars ............average_pooling2d ...............vars ............batch_normalization ...............vars ..................0 ..................1 ..................2 ............batch_normalization_1 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_10 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_100 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_101 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_102 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_103 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_104 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_105 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_106 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_107 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_108 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_109 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_11 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_110 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_111 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_112 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_113 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_114 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_115 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_116 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_117 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_118 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_119 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_12 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_120 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_121 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_122 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_123 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_124 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_125 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_126 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_127 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_128 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_129 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_13 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_130 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_131 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_132 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_133 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_134 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_135 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_136 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_137 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_138 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_139 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_14 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_140 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_141 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_142 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_143 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_144 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_145 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_146 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_147 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_148 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_149 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_15 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_150 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_151 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_152 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_153 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_154 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_155 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_156 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_157 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_158 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_159 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_16 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_160 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_161 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_162 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_163 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_164 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_165 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_166 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_167 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_168 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_169 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_17 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_170 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_171 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_172 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_173 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_174 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_175 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_176 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_177 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_178 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_179 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_18 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_180 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_181 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_182 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_183 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_184 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_185 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_186 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_187 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_188 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_189 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_19 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_190 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_191 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_192 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_193 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_194 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_195 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_196 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_197 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_198 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_199 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_2 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_20 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_200 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_201 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_202 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_203 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_21 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_22 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_23 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_24 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_25 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_26 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_27 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_28 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_29 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_3 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_30 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_31 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_32 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_33 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_34 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_35 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_36 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_37 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_38 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_39 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_4 
...............vars ..................0 ..................1 ..................2 ............batch_normalization_40 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_41 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_42 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_43 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_44 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_45 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_46 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_47 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_48 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_49 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_5 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_50 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_51 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_52 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_53 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_54 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_55 ...............vars ..................0 
..................1 ..................2 ............batch_normalization_56 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_57 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_58 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_59 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_6 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_60 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_61 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_62 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_63 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_64 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_65 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_66 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_67 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_68 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_69 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_7 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_70 ...............vars ..................0 ..................1 ..................2 
............batch_normalization_71 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_72 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_73 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_74 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_75 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_76 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_77 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_78 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_79 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_8 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_80 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_81 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_82 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_83 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_84 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_85 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_86 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_87 
...............vars ..................0 ..................1 ..................2 ............batch_normalization_88 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_89 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_9 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_90 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_91 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_92 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_93 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_94 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_95 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_96 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_97 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_98 ...............vars ..................0 ..................1 ..................2 ............batch_normalization_99 ...............vars ..................0 ..................1 ..................2 ............concatenate ...............vars ............concatenate_1 ...............vars ............concatenate_10 ...............vars ............concatenate_11 ...............vars ............concatenate_12 ...............vars ............concatenate_13 ...............vars ............concatenate_14 ...............vars ............concatenate_15 ...............vars ............concatenate_16 ...............vars 
............concatenate_17 ...............vars ............concatenate_18 ...............vars ............concatenate_19 ...............vars ............concatenate_2 ...............vars ............concatenate_20 ...............vars ............concatenate_21 ...............vars ............concatenate_22 ...............vars ............concatenate_23 ...............vars ............concatenate_24 ...............vars ............concatenate_25 ...............vars ............concatenate_26 ...............vars ............concatenate_27 ...............vars ............concatenate_28 ...............vars ............concatenate_29 ...............vars ............concatenate_3 ...............vars ............concatenate_30 ...............vars ............concatenate_31 ...............vars ............concatenate_32 ...............vars ............concatenate_33 ...............vars ............concatenate_34 ...............vars ............concatenate_35 ...............vars ............concatenate_36 ...............vars ............concatenate_37 ...............vars ............concatenate_38 ...............vars ............concatenate_39 ...............vars ............concatenate_4 ...............vars ............concatenate_40 ...............vars ............concatenate_41 ...............vars ............concatenate_42 ...............vars ............concatenate_5 ...............vars ............concatenate_6 ...............vars ............concatenate_7 ...............vars ............concatenate_8 ...............vars ............concatenate_9 ...............vars ............conv2d ...............vars ..................0 ............conv2d_1 ...............vars ..................0 ............conv2d_10 ...............vars ..................0 ............conv2d_100 ...............vars ..................0 ..................1 ............conv2d_101 ...............vars ..................0 ............conv2d_102 ...............vars ..................0 
............conv2d_103 ...............vars ..................0 ............conv2d_104 ...............vars ..................0 ............conv2d_105 ...............vars ..................0 ..................1 ............conv2d_106 ...............vars ..................0 ............conv2d_107 ...............vars ..................0 ............conv2d_108 ...............vars ..................0 ............conv2d_109 ...............vars ..................0 ............conv2d_11 ...............vars ..................0 ............conv2d_110 ...............vars ..................0 ..................1 ............conv2d_111 ...............vars ..................0 ............conv2d_112 ...............vars ..................0 ............conv2d_113 ...............vars ..................0 ............conv2d_114 ...............vars ..................0 ............conv2d_115 ...............vars ..................0 ..................1 ............conv2d_116 ...............vars ..................0 ............conv2d_117 ...............vars ..................0 ............conv2d_118 ...............vars ..................0 ............conv2d_119 ...............vars ..................0 ............conv2d_12 ...............vars ..................0 ............conv2d_120 ...............vars ..................0 ..................1 ............conv2d_121 ...............vars ..................0 ............conv2d_122 ...............vars ..................0 ............conv2d_123 ...............vars ..................0 ............conv2d_124 ...............vars ..................0 ............conv2d_125 ...............vars ..................0 ..................1 ............conv2d_126 ...............vars ..................0 ............conv2d_127 ...............vars ..................0 ............conv2d_128 ...............vars ..................0 ............conv2d_129 ...............vars ..................0 ............conv2d_13 ...............vars ..................0 
............conv2d_130 ...............vars ..................0 ..................1 ............conv2d_131 ...............vars ..................0 ............conv2d_132 ...............vars ..................0 ............conv2d_133 ...............vars ..................0 ............conv2d_134 ...............vars ..................0 ............conv2d_135 ...............vars ..................0 ..................1 ............conv2d_136 ...............vars ..................0 ............conv2d_137 ...............vars ..................0 ............conv2d_138 ...............vars ..................0 ............conv2d_139 ...............vars ..................0 ............conv2d_14 ...............vars ..................0 ............conv2d_140 ...............vars ..................0 ..................1 ............conv2d_141 ...............vars ..................0 ............conv2d_142 ...............vars ..................0 ............conv2d_143 ...............vars ..................0 ............conv2d_144 ...............vars ..................0 ............conv2d_145 ...............vars ..................0 ..................1 ............conv2d_146 ...............vars ..................0 ............conv2d_147 ...............vars ..................0 ............conv2d_148 ...............vars ..................0 ............conv2d_149 ...............vars ..................0 ............conv2d_15 ...............vars ..................0 ............conv2d_150 ...............vars ..................0 ..................1 ............conv2d_151 ...............vars ..................0 ............conv2d_152 ...............vars ..................0 ............conv2d_153 ...............vars ..................0 ............conv2d_154 ...............vars ..................0 ............conv2d_155 ...............vars ..................0 ..................1 ............conv2d_156 ...............vars ..................0 ............conv2d_157 ...............vars 
..................0 ............conv2d_158 ...............vars ..................0 ............conv2d_159 ...............vars ..................0 ............conv2d_16 ...............vars ..................0 ............conv2d_160 ...............vars ..................0 ..................1 ............conv2d_161 ...............vars ..................0 ............conv2d_162 ...............vars ..................0 ............conv2d_163 ...............vars ..................0 ............conv2d_164 ...............vars ..................0 ............conv2d_165 ...............vars ..................0 ..................1 ............conv2d_166 ...............vars ..................0 ............conv2d_167 ...............vars ..................0 ............conv2d_168 ...............vars ..................0 ............conv2d_169 ...............vars ..................0 ............conv2d_17 ...............vars ..................0 ............conv2d_170 ...............vars ..................0 ..................1 ............conv2d_171 ...............vars ..................0 ............conv2d_172 ...............vars ..................0 ............conv2d_173 ...............vars ..................0 ............conv2d_174 ...............vars ..................0 ............conv2d_175 ...............vars ..................0 ..................1 ............conv2d_176 ...............vars ..................0 ............conv2d_177 ...............vars ..................0 ............conv2d_178 ...............vars ..................0 ............conv2d_179 ...............vars ..................0 ............conv2d_18 ...............vars ..................0 ..................1 ............conv2d_180 ...............vars ..................0 ..................1 ............conv2d_181 ...............vars ..................0 ............conv2d_182 ...............vars ..................0 ............conv2d_183 ...............vars ..................0 ............conv2d_184 
...............vars ..................0 ............conv2d_185 ...............vars ..................0 ..................1 ............conv2d_186 ...............vars ..................0 ............conv2d_187 ...............vars ..................0 ............conv2d_188 ...............vars ..................0 ............conv2d_189 ...............vars ..................0 ............conv2d_19 ...............vars ..................0 ............conv2d_190 ...............vars ..................0 ............conv2d_191 ...............vars ..................0 ............conv2d_192 ...............vars ..................0 ............conv2d_193 ...............vars ..................0 ............conv2d_194 ...............vars ..................0 ............conv2d_195 ...............vars ..................0 ............conv2d_196 ...............vars ..................0 ............conv2d_197 ...............vars ..................0 ..................1 ............conv2d_198 ...............vars ..................0 ............conv2d_199 ...............vars ..................0 ............conv2d_2 ...............vars ..................0 ............conv2d_20 ...............vars ..................0 ............conv2d_200 ...............vars ..................0 ............conv2d_201 ...............vars ..................0 ............conv2d_202 ...............vars ..................0 ..................1 ............conv2d_203 ...............vars ..................0 ............conv2d_204 ...............vars ..................0 ............conv2d_205 ...............vars ..................0 ............conv2d_206 ...............vars ..................0 ............conv2d_207 ...............vars ..................0 ..................1 ............conv2d_208 ...............vars ..................0 ............conv2d_209 ...............vars ..................0 ............conv2d_21 ...............vars ..................0 ............conv2d_210 ...............vars 
..................0 ............conv2d_211 ...............vars ..................0 ............conv2d_212 ...............vars ..................0 ..................1 ............conv2d_213 ...............vars ..................0 ............conv2d_214 ...............vars ..................0 ............conv2d_215 ...............vars ..................0 ............conv2d_216 ...............vars ..................0 ............conv2d_217 ...............vars ..................0 ..................1 ............conv2d_218 ...............vars ..................0 ............conv2d_219 ...............vars ..................0 ............conv2d_22 ...............vars ..................0 ............conv2d_220 ...............vars ..................0 ............conv2d_221 ...............vars ..................0 ............conv2d_222 ...............vars ..................0 ..................1 ............conv2d_223 ...............vars ..................0 ............conv2d_224 ...............vars ..................0 ............conv2d_225 ...............vars ..................0 ............conv2d_226 ...............vars ..................0 ............conv2d_227 ...............vars ..................0 ..................1 ............conv2d_228 ...............vars ..................0 ............conv2d_229 ...............vars ..................0 ............conv2d_23 ...............vars ..................0 ............conv2d_230 ...............vars ..................0 ............conv2d_231 ...............vars ..................0 ............conv2d_232 ...............vars ..................0 ..................1 ............conv2d_233 ...............vars ..................0 ............conv2d_234 ...............vars ..................0 ............conv2d_235 ...............vars ..................0 ............conv2d_236 ...............vars ..................0 ............conv2d_237 ...............vars ..................0 ..................1 ............conv2d_238 
...............vars ..................0 ............conv2d_239 ...............vars ..................0 ............conv2d_24 ...............vars ..................0 ............conv2d_240 ...............vars ..................0 ............conv2d_241 ...............vars ..................0 ............conv2d_242 ...............vars ..................0 ..................1 ............conv2d_243 ...............vars ..................0 ............conv2d_25 ...............vars ..................0 ..................1 ............conv2d_26 ...............vars ..................0 ............conv2d_27 ...............vars ..................0 ............conv2d_28 ...............vars ..................0 ............conv2d_29 ...............vars ..................0 ............conv2d_3 ...............vars ..................0 ............conv2d_30 ...............vars ..................0 ............conv2d_31 ...............vars ..................0 ............conv2d_32 ...............vars ..................0 ..................1 ............conv2d_33 ...............vars ..................0 ............conv2d_34 ...............vars ..................0 ............conv2d_35 ...............vars ..................0 ............conv2d_36 ...............vars ..................0 ............conv2d_37 ...............vars ..................0 ............conv2d_38 ...............vars ..................0 ............conv2d_39 ...............vars ..................0 ..................1 ............conv2d_4 ...............vars ..................0 ............conv2d_40 ...............vars ..................0 ............conv2d_41 ...............vars ..................0 ............conv2d_42 ...............vars ..................0 ............conv2d_43 ...............vars ..................0 ............conv2d_44 ...............vars ..................0 ............conv2d_45 ...............vars ..................0 ............conv2d_46 ...............vars ..................0 
..................1 ............conv2d_47 ...............vars ..................0 ............conv2d_48 ...............vars ..................0 ............conv2d_49 ...............vars ..................0 ............conv2d_5 ...............vars ..................0 ............conv2d_50 ...............vars ..................0 ............conv2d_51 ...............vars ..................0 ............conv2d_52 ...............vars ..................0 ............conv2d_53 ...............vars ..................0 ..................1 ............conv2d_54 ...............vars ..................0 ............conv2d_55 ...............vars ..................0 ............conv2d_56 ...............vars ..................0 ............conv2d_57 ...............vars ..................0 ............conv2d_58 ...............vars ..................0 ............conv2d_59 ...............vars ..................0 ............conv2d_6 ...............vars ..................0 ............conv2d_60 ...............vars ..................0 ..................1 ............conv2d_61 ...............vars ..................0 ............conv2d_62 ...............vars ..................0 ............conv2d_63 ...............vars ..................0 ............conv2d_64 ...............vars ..................0 ............conv2d_65 ...............vars ..................0 ............conv2d_66 ...............vars ..................0 ............conv2d_67 ...............vars ..................0 ..................1 ............conv2d_68 ...............vars ..................0 ............conv2d_69 ...............vars ..................0 ............conv2d_7 ...............vars ..................0 ............conv2d_70 ...............vars ..................0 ............conv2d_71 ...............vars ..................0 ............conv2d_72 ...............vars ..................0 ............conv2d_73 ...............vars ..................0 ............conv2d_74 ...............vars ..................0 
..................1 ............conv2d_75 ...............vars ..................0 ............conv2d_76 ...............vars ..................0 ............conv2d_77 ...............vars ..................0 ............conv2d_78 ...............vars ..................0 ............conv2d_79 ...............vars ..................0 ............conv2d_8 ...............vars ..................0 ............conv2d_80 ...............vars ..................0 ............conv2d_81 ...............vars ..................0 ..................1 ............conv2d_82 ...............vars ..................0 ............conv2d_83 ...............vars ..................0 ............conv2d_84 ...............vars ..................0 ............conv2d_85 ...............vars ..................0 ............conv2d_86 ...............vars ..................0 ............conv2d_87 ...............vars ..................0 ............conv2d_88 ...............vars ..................0 ............conv2d_89 ...............vars ..................0 ............conv2d_9 ...............vars ..................0 ............conv2d_90 ...............vars ..................0 ..................1 ............conv2d_91 ...............vars ..................0 ............conv2d_92 ...............vars ..................0 ............conv2d_93 ...............vars ..................0 ............conv2d_94 ...............vars ..................0 ............conv2d_95 ...............vars ..................0 ..................1 ............conv2d_96 ...............vars ..................0 ............conv2d_97 ...............vars ..................0 ............conv2d_98 ...............vars ..................0 ............conv2d_99 ...............vars ..................0 ............input_layer ...............vars ............lambda ...............vars ............lambda_1 ...............vars ............lambda_10 ...............vars ............lambda_11 ...............vars ............lambda_12 
...............vars ............lambda_13 ...............vars ............lambda_14 ...............vars ............lambda_15 ...............vars ............lambda_16 ...............vars ............lambda_17 ...............vars ............lambda_18 ...............vars ............lambda_19 ...............vars ............lambda_2 ...............vars ............lambda_20 ...............vars ............lambda_21 ...............vars ............lambda_22 ...............vars ............lambda_23 ...............vars ............lambda_24 ...............vars ............lambda_25 ...............vars ............lambda_26 ...............vars ............lambda_27 ...............vars ............lambda_28 ...............vars ............lambda_29 ...............vars ............lambda_3 ...............vars ............lambda_30 ...............vars ............lambda_31 ...............vars ............lambda_32 ...............vars ............lambda_33 ...............vars ............lambda_34 ...............vars ............lambda_35 ...............vars ............lambda_36 ...............vars ............lambda_37 ...............vars ............lambda_38 ...............vars ............lambda_39 ...............vars ............lambda_4 ...............vars ............lambda_5 ...............vars ............lambda_6 ...............vars ............lambda_7 ...............vars ............lambda_8 ...............vars ............lambda_9 ...............vars ............max_pooling2d ...............vars ............max_pooling2d_1 ...............vars ............max_pooling2d_2 ...............vars ............max_pooling2d_3 ...............vars .........vars ...metrics ......mean .........vars ............0 ............1 ......mean_metric_wrapper .........vars ............0 ............1 ...optimizer ......vars .........0 .........1 .........10 .........11 .........12 .........2 .........3 .........4 .........5 .........6 .........7 .........8 .........9 ...vars 
Keras model archive saving: File Name Modified Size metadata.json 2023-03-13 15:13:04 64 config.json 2023-03-13 15:13:04 380817 variables.h5 2023-03-13 15:13:07 261701504
from tensorflow.keras.applications.inception_resnet_v2 import preprocess_input
# classification_report / confusion_matrix were used below but never imported at the top of the file
from sklearn.metrics import classification_report, confusion_matrix

# SECURITY NOTE(review): pickle.load executes arbitrary code from the file —
# only load model pickles you created yourself.
with open(images_path + 'model_TL_InceptionV2.pkl', 'rb') as f:
    pickled_model_inceptionv2 = pickle.load(f)

# Apply the InceptionResNetV2-specific preprocessing to both splits.
X_train_incepv2 = preprocess_input(X_train_rgb)
X_test_incepv2 = preprocess_input(X_test_rgb)


def _evaluate_split(model, X, Y_onehot):
    """Predict on X, print a report, plot the confusion matrix, and
    return (per_class_accuracy, overall_accuracy).

    Y_onehot is a one-hot DataFrame; its idxmax recovers the true class index.
    Class index 0 is 'Lung Opacity', 1 is 'Normal' / 'No Pneumonia'.
    """
    probs = model.predict(X)
    y_pred = [np.argmax(p) for p in probs]
    y_true = Y_onehot.idxmax(axis=1)
    # sklearn expects (y_true, y_pred) in that order — the original call had them swapped,
    # which transposed precision and recall in the printed report.
    print(classification_report(y_true, y_pred))
    cm = confusion_matrix(y_true, y_pred)
    plt.figure(figsize=(10, 7))
    sns.heatmap(cm, annot=True, fmt='d')
    # confusion_matrix puts truth on rows (heatmap y-axis) and predictions on
    # columns (x-axis); the original labels were inverted.
    plt.xlabel('Predicted')
    plt.ylabel('Truth')
    plt.show()
    # Diagonal over row sums = per-class recall ("accuracy" per true class).
    per_class_acc = cm.diagonal() / cm.sum(axis=1)
    return per_class_acc, accuracy_score(y_true, y_pred)


# Training-data metrics (this split includes the validation data seen during training).
train_acc, train_acc_overall = _evaluate_split(pickled_model_inceptionv2, X_train_incepv2, Y_train)

# Test-data metrics — data the model has never seen.
test_acc, test_acc_overall = _evaluate_split(pickled_model_inceptionv2, X_test_incepv2, Y_test)

# Append this model's accuracy figures to the running results spreadsheet.
perf_dict = {'Model': 'TL InceptionV2'}
perf_dict['Tr_Acc Overall '] = train_acc_overall
for cls_name, acc in zip(le.classes_, train_acc):
    perf_dict['Tr_Acc ' + cls_name] = acc
perf_dict['Ts_Acc Overall '] = test_acc_overall
for cls_name, acc in zip(le.classes_, test_acc):
    perf_dict['Ts_Acc ' + cls_name] = acc
df_results = pd.read_excel(images_path + 'df_results.xlsx')
df_results.loc[len(df_results.index)] = list(perf_dict.values())
df_results.to_excel(images_path + 'df_results.xlsx', index=False)
Keras model archive loading:
File Name Modified Size
metadata.json 2023-03-13 15:13:04 64
config.json 2023-03-13 15:13:04 380817
variables.h5 2023-03-13 15:13:06 261701504
Keras weights file (<HDF5 file "variables.h5" (mode r)>) loading:
...layers
......batch_normalization
.........vars
............0
............1
............2
............3
......dense
.........vars
............0
............1
......dense_1
.........vars
............0
............1
......dropout
.........vars
......flatten
.........vars
......functional
.........layers
............activation
...............vars
............activation_1
...............vars
............activation_10
...............vars
............activation_100
...............vars
............activation_101
...............vars
............activation_102
...............vars
............activation_103
...............vars
............activation_104
...............vars
............activation_105
...............vars
............activation_106
...............vars
............activation_107
...............vars
............activation_108
...............vars
............activation_109
...............vars
............activation_11
...............vars
............activation_110
...............vars
............activation_111
...............vars
............activation_112
...............vars
............activation_113
...............vars
............activation_114
...............vars
............activation_115
...............vars
............activation_116
...............vars
............activation_117
...............vars
............activation_118
...............vars
............activation_119
...............vars
............activation_12
...............vars
............activation_120
...............vars
............activation_121
...............vars
............activation_122
...............vars
............activation_123
...............vars
............activation_124
...............vars
............activation_125
...............vars
............activation_126
...............vars
............activation_127
...............vars
............activation_128
...............vars
............activation_129
...............vars
............activation_13
...............vars
............activation_130
...............vars
............activation_131
...............vars
............activation_132
...............vars
............activation_133
...............vars
............activation_134
...............vars
............activation_135
...............vars
............activation_136
...............vars
............activation_137
...............vars
............activation_138
...............vars
............activation_139
...............vars
............activation_14
...............vars
............activation_140
...............vars
............activation_141
...............vars
............activation_142
...............vars
............activation_143
...............vars
............activation_144
...............vars
............activation_145
...............vars
............activation_146
...............vars
............activation_147
...............vars
............activation_148
...............vars
............activation_149
...............vars
............activation_15
...............vars
............activation_150
...............vars
............activation_151
...............vars
............activation_152
...............vars
............activation_153
...............vars
............activation_154
...............vars
............activation_155
...............vars
............activation_156
...............vars
............activation_157
...............vars
............activation_158
...............vars
............activation_159
...............vars
............activation_16
...............vars
............activation_160
...............vars
............activation_161
...............vars
............activation_162
...............vars
............activation_163
...............vars
............activation_164
...............vars
............activation_165
...............vars
............activation_166
...............vars
............activation_167
...............vars
............activation_168
...............vars
............activation_169
...............vars
............activation_17
...............vars
............activation_170
...............vars
............activation_171
...............vars
............activation_172
...............vars
............activation_173
...............vars
............activation_174
...............vars
............activation_175
...............vars
............activation_176
...............vars
............activation_177
...............vars
............activation_178
...............vars
............activation_179
...............vars
............activation_18
...............vars
............activation_180
...............vars
............activation_181
...............vars
............activation_182
...............vars
............activation_183
...............vars
............activation_184
...............vars
............activation_185
...............vars
............activation_186
...............vars
............activation_187
...............vars
............activation_188
...............vars
............activation_189
...............vars
............activation_19
...............vars
............activation_190
...............vars
............activation_191
...............vars
............activation_192
...............vars
............activation_193
...............vars
............activation_194
...............vars
............activation_195
...............vars
............activation_196
...............vars
............activation_197
...............vars
............activation_198
...............vars
............activation_199
...............vars
............activation_2
...............vars
............activation_20
...............vars
............activation_200
...............vars
............activation_201
...............vars
............activation_202
...............vars
............activation_203
...............vars
............activation_204
...............vars
............activation_205
...............vars
............activation_206
...............vars
............activation_207
...............vars
............activation_208
...............vars
............activation_209
...............vars
............activation_21
...............vars
............activation_210
...............vars
............activation_211
...............vars
............activation_212
...............vars
............activation_213
...............vars
............activation_214
...............vars
............activation_215
...............vars
............activation_216
...............vars
............activation_217
...............vars
............activation_218
...............vars
............activation_219
...............vars
............activation_22
...............vars
............activation_220
...............vars
............activation_221
...............vars
............activation_222
...............vars
............activation_223
...............vars
............activation_224
...............vars
............activation_225
...............vars
............activation_226
...............vars
............activation_227
...............vars
............activation_228
...............vars
............activation_229
...............vars
............activation_23
...............vars
............activation_230
...............vars
............activation_231
...............vars
............activation_232
...............vars
............activation_233
...............vars
............activation_234
...............vars
............activation_235
...............vars
............activation_236
...............vars
............activation_237
...............vars
............activation_238
...............vars
............activation_239
...............vars
............activation_24
...............vars
............activation_240
...............vars
............activation_241
...............vars
............activation_242
...............vars
............activation_25
...............vars
............activation_26
...............vars
............activation_27
...............vars
............activation_28
...............vars
............activation_29
...............vars
............activation_3
...............vars
............activation_30
...............vars
............activation_31
...............vars
............activation_32
...............vars
............activation_33
...............vars
............activation_34
...............vars
............activation_35
...............vars
............activation_36
...............vars
............activation_37
...............vars
............activation_38
...............vars
............activation_39
...............vars
............activation_4
...............vars
............activation_40
...............vars
............activation_41
...............vars
............activation_42
...............vars
............activation_43
...............vars
............activation_44
...............vars
............activation_45
...............vars
............activation_46
...............vars
............activation_47
...............vars
............activation_48
...............vars
............activation_49
...............vars
............activation_5
...............vars
............activation_50
...............vars
............activation_51
...............vars
............activation_52
...............vars
............activation_53
...............vars
............activation_54
...............vars
............activation_55
...............vars
............activation_56
...............vars
............activation_57
...............vars
............activation_58
...............vars
............activation_59
...............vars
............activation_6
...............vars
............activation_60
...............vars
............activation_61
...............vars
............activation_62
...............vars
............activation_63
...............vars
............activation_64
...............vars
............activation_65
...............vars
............activation_66
...............vars
............activation_67
...............vars
............activation_68
...............vars
............activation_69
...............vars
............activation_7
...............vars
............activation_70
...............vars
............activation_71
...............vars
............activation_72
...............vars
............activation_73
...............vars
............activation_74
...............vars
............activation_75
...............vars
............activation_76
...............vars
............activation_77
...............vars
............activation_78
...............vars
............activation_79
...............vars
............activation_8
...............vars
............activation_80
...............vars
............activation_81
...............vars
............activation_82
...............vars
............activation_83
...............vars
............activation_84
...............vars
............activation_85
...............vars
............activation_86
...............vars
............activation_87
...............vars
............activation_88
...............vars
............activation_89
...............vars
............activation_9
...............vars
............activation_90
...............vars
............activation_91
...............vars
............activation_92
...............vars
............activation_93
...............vars
............activation_94
...............vars
............activation_95
...............vars
............activation_96
...............vars
............activation_97
...............vars
............activation_98
...............vars
............activation_99
...............vars
............average_pooling2d
...............vars
............batch_normalization
...............vars
..................0
..................1
..................2
............batch_normalization_1
...............vars
..................0
..................1
..................2
............batch_normalization_10
...............vars
..................0
..................1
..................2
............batch_normalization_100
...............vars
..................0
..................1
..................2
............batch_normalization_101
...............vars
..................0
..................1
..................2
............batch_normalization_102
...............vars
..................0
..................1
..................2
............batch_normalization_103
...............vars
..................0
..................1
..................2
............batch_normalization_104
...............vars
..................0
..................1
..................2
............batch_normalization_105
...............vars
..................0
..................1
..................2
............batch_normalization_106
...............vars
..................0
..................1
..................2
............batch_normalization_107
...............vars
..................0
..................1
..................2
............batch_normalization_108
...............vars
..................0
..................1
..................2
............batch_normalization_109
...............vars
..................0
..................1
..................2
............batch_normalization_11
...............vars
..................0
..................1
..................2
............batch_normalization_110
...............vars
..................0
..................1
..................2
............batch_normalization_111
...............vars
..................0
..................1
..................2
............batch_normalization_112
...............vars
..................0
..................1
..................2
............batch_normalization_113
...............vars
..................0
..................1
..................2
............batch_normalization_114
...............vars
..................0
..................1
..................2
............batch_normalization_115
...............vars
..................0
..................1
..................2
............batch_normalization_116
...............vars
..................0
..................1
..................2
............batch_normalization_117
...............vars
..................0
..................1
..................2
............batch_normalization_118
...............vars
..................0
..................1
..................2
............batch_normalization_119
...............vars
..................0
..................1
..................2
............batch_normalization_12
...............vars
..................0
..................1
..................2
............batch_normalization_120
...............vars
..................0
..................1
..................2
............batch_normalization_121
...............vars
..................0
..................1
..................2
............batch_normalization_122
...............vars
..................0
..................1
..................2
............batch_normalization_123
...............vars
..................0
..................1
..................2
............batch_normalization_124
...............vars
..................0
..................1
..................2
............batch_normalization_125
...............vars
..................0
..................1
..................2
............batch_normalization_126
...............vars
..................0
..................1
..................2
............batch_normalization_127
...............vars
..................0
..................1
..................2
............batch_normalization_128
...............vars
..................0
..................1
..................2
............batch_normalization_129
...............vars
..................0
..................1
..................2
............batch_normalization_13
...............vars
..................0
..................1
..................2
............batch_normalization_130
...............vars
..................0
..................1
..................2
............batch_normalization_131
...............vars
..................0
..................1
..................2
............batch_normalization_132
...............vars
..................0
..................1
..................2
............batch_normalization_133
...............vars
..................0
..................1
..................2
............batch_normalization_134
...............vars
..................0
..................1
..................2
............batch_normalization_135
...............vars
..................0
..................1
..................2
............batch_normalization_136
...............vars
..................0
..................1
..................2
............batch_normalization_137
...............vars
..................0
..................1
..................2
............batch_normalization_138
...............vars
..................0
..................1
..................2
............batch_normalization_139
...............vars
..................0
..................1
..................2
............batch_normalization_14
...............vars
..................0
..................1
..................2
............batch_normalization_140
...............vars
..................0
..................1
..................2
............batch_normalization_141
...............vars
..................0
..................1
..................2
............batch_normalization_142
...............vars
..................0
..................1
..................2
............batch_normalization_143
...............vars
..................0
..................1
..................2
............batch_normalization_144
...............vars
..................0
..................1
..................2
............batch_normalization_145
...............vars
..................0
..................1
..................2
............batch_normalization_146
...............vars
..................0
..................1
..................2
............batch_normalization_147
...............vars
..................0
..................1
..................2
............batch_normalization_148
...............vars
..................0
..................1
..................2
............batch_normalization_149
...............vars
..................0
..................1
..................2
............batch_normalization_15
...............vars
..................0
..................1
..................2
............batch_normalization_150
...............vars
..................0
..................1
..................2
............batch_normalization_151
...............vars
..................0
..................1
..................2
............batch_normalization_152
...............vars
..................0
..................1
..................2
............batch_normalization_153
...............vars
..................0
..................1
..................2
............batch_normalization_154
...............vars
..................0
..................1
..................2
............batch_normalization_155
...............vars
..................0
..................1
..................2
............batch_normalization_156
...............vars
..................0
..................1
..................2
............batch_normalization_157
...............vars
..................0
..................1
..................2
............batch_normalization_158
...............vars
..................0
..................1
..................2
............batch_normalization_159
...............vars
..................0
..................1
..................2
............batch_normalization_16
...............vars
..................0
..................1
..................2
............batch_normalization_160
...............vars
..................0
..................1
..................2
............batch_normalization_161
...............vars
..................0
..................1
..................2
............batch_normalization_162
...............vars
..................0
..................1
..................2
............batch_normalization_163
...............vars
..................0
..................1
..................2
............batch_normalization_164
...............vars
..................0
..................1
..................2
............batch_normalization_165
...............vars
..................0
..................1
..................2
............batch_normalization_166
...............vars
..................0
..................1
..................2
............batch_normalization_167
...............vars
..................0
..................1
..................2
............batch_normalization_168
...............vars
..................0
..................1
..................2
............batch_normalization_169
...............vars
..................0
..................1
..................2
............batch_normalization_17
...............vars
..................0
..................1
..................2
............batch_normalization_170
...............vars
..................0
..................1
..................2
............batch_normalization_171
...............vars
..................0
..................1
..................2
............batch_normalization_172
...............vars
..................0
..................1
..................2
............batch_normalization_173
...............vars
..................0
..................1
..................2
............batch_normalization_174
...............vars
..................0
..................1
..................2
............batch_normalization_175
...............vars
..................0
..................1
..................2
............batch_normalization_176
...............vars
..................0
..................1
..................2
............batch_normalization_177
...............vars
..................0
..................1
..................2
............batch_normalization_178
...............vars
..................0
..................1
..................2
............batch_normalization_179
...............vars
..................0
..................1
..................2
............batch_normalization_18
...............vars
..................0
..................1
..................2
............batch_normalization_180
...............vars
..................0
..................1
..................2
............batch_normalization_181
...............vars
..................0
..................1
..................2
............batch_normalization_182
...............vars
..................0
..................1
..................2
............batch_normalization_183
...............vars
..................0
..................1
..................2
............batch_normalization_184
...............vars
..................0
..................1
..................2
............batch_normalization_185
...............vars
..................0
..................1
..................2
............batch_normalization_186
...............vars
..................0
..................1
..................2
............batch_normalization_187
...............vars
..................0
..................1
..................2
............batch_normalization_188
...............vars
..................0
..................1
..................2
............batch_normalization_189
...............vars
..................0
..................1
..................2
............batch_normalization_19
...............vars
..................0
..................1
..................2
............batch_normalization_190
...............vars
..................0
..................1
..................2
............batch_normalization_191
...............vars
..................0
..................1
..................2
............batch_normalization_192
...............vars
..................0
..................1
..................2
............batch_normalization_193
...............vars
..................0
..................1
..................2
............batch_normalization_194
...............vars
..................0
..................1
..................2
............batch_normalization_195
...............vars
..................0
..................1
..................2
............batch_normalization_196
...............vars
..................0
..................1
..................2
............batch_normalization_197
...............vars
..................0
..................1
..................2
............batch_normalization_198
...............vars
..................0
..................1
..................2
............batch_normalization_199
...............vars
..................0
..................1
..................2
............batch_normalization_2
...............vars
..................0
..................1
..................2
............batch_normalization_20
...............vars
..................0
..................1
..................2
............batch_normalization_200
...............vars
..................0
..................1
..................2
............batch_normalization_201
...............vars
..................0
..................1
..................2
............batch_normalization_202
...............vars
..................0
..................1
..................2
............batch_normalization_203
...............vars
..................0
..................1
..................2
............batch_normalization_21
...............vars
..................0
..................1
..................2
............batch_normalization_22
...............vars
..................0
..................1
..................2
............batch_normalization_23
...............vars
..................0
..................1
..................2
............batch_normalization_24
...............vars
..................0
..................1
..................2
............batch_normalization_25
...............vars
..................0
..................1
..................2
............batch_normalization_26
...............vars
..................0
..................1
..................2
............batch_normalization_27
...............vars
..................0
..................1
..................2
............batch_normalization_28
...............vars
..................0
..................1
..................2
............batch_normalization_29
...............vars
..................0
..................1
..................2
............batch_normalization_3
...............vars
..................0
..................1
..................2
............batch_normalization_30
...............vars
..................0
..................1
..................2
............batch_normalization_31
...............vars
..................0
..................1
..................2
............batch_normalization_32
...............vars
..................0
..................1
..................2
............batch_normalization_33
...............vars
..................0
..................1
..................2
............batch_normalization_34
...............vars
..................0
..................1
..................2
............batch_normalization_35
...............vars
..................0
..................1
..................2
............batch_normalization_36
...............vars
..................0
..................1
..................2
............batch_normalization_37
...............vars
..................0
..................1
..................2
............batch_normalization_38
...............vars
..................0
..................1
..................2
............batch_normalization_39
...............vars
..................0
..................1
..................2
............batch_normalization_4
...............vars
..................0
..................1
..................2
............batch_normalization_40
...............vars
..................0
..................1
..................2
............batch_normalization_41
...............vars
..................0
..................1
..................2
............batch_normalization_42
...............vars
..................0
..................1
..................2
............batch_normalization_43
...............vars
..................0
..................1
..................2
............batch_normalization_44
...............vars
..................0
..................1
..................2
............batch_normalization_45
...............vars
..................0
..................1
..................2
............batch_normalization_46
...............vars
..................0
..................1
..................2
............batch_normalization_47
...............vars
..................0
..................1
..................2
............batch_normalization_48
...............vars
..................0
..................1
..................2
............batch_normalization_49
...............vars
..................0
..................1
..................2
............batch_normalization_5
...............vars
..................0
..................1
..................2
............batch_normalization_50
...............vars
..................0
..................1
..................2
............batch_normalization_51
...............vars
..................0
..................1
..................2
............batch_normalization_52
...............vars
..................0
..................1
..................2
............batch_normalization_53
...............vars
..................0
..................1
..................2
............batch_normalization_54
...............vars
..................0
..................1
..................2
............batch_normalization_55
...............vars
..................0
..................1
..................2
............batch_normalization_56
...............vars
..................0
..................1
..................2
............batch_normalization_57
...............vars
..................0
..................1
..................2
............batch_normalization_58
...............vars
..................0
..................1
..................2
............batch_normalization_59
...............vars
..................0
..................1
..................2
............batch_normalization_6
...............vars
..................0
..................1
..................2
............batch_normalization_60
...............vars
..................0
..................1
..................2
............batch_normalization_61
...............vars
..................0
..................1
..................2
............batch_normalization_62
...............vars
..................0
..................1
..................2
............batch_normalization_63
...............vars
..................0
..................1
..................2
............batch_normalization_64
...............vars
..................0
..................1
..................2
............batch_normalization_65
...............vars
..................0
..................1
..................2
............batch_normalization_66
...............vars
..................0
..................1
..................2
............batch_normalization_67
...............vars
..................0
..................1
..................2
............batch_normalization_68
...............vars
..................0
..................1
..................2
............batch_normalization_69
...............vars
..................0
..................1
..................2
............batch_normalization_7
...............vars
..................0
..................1
..................2
............batch_normalization_70
...............vars
..................0
..................1
..................2
............batch_normalization_71
...............vars
..................0
..................1
..................2
............batch_normalization_72
...............vars
..................0
..................1
..................2
............batch_normalization_73
...............vars
..................0
..................1
..................2
............batch_normalization_74
...............vars
..................0
..................1
..................2
............batch_normalization_75
...............vars
..................0
..................1
..................2
............batch_normalization_76
...............vars
..................0
..................1
..................2
............batch_normalization_77
...............vars
..................0
..................1
..................2
............batch_normalization_78
...............vars
..................0
..................1
..................2
............batch_normalization_79
...............vars
..................0
..................1
..................2
............batch_normalization_8
...............vars
..................0
..................1
..................2
............batch_normalization_80
...............vars
..................0
..................1
..................2
............batch_normalization_81
...............vars
..................0
..................1
..................2
............batch_normalization_82
...............vars
..................0
..................1
..................2
............batch_normalization_83
...............vars
..................0
..................1
..................2
............batch_normalization_84
...............vars
..................0
..................1
..................2
............batch_normalization_85
...............vars
..................0
..................1
..................2
............batch_normalization_86
...............vars
..................0
..................1
..................2
............batch_normalization_87
...............vars
..................0
..................1
..................2
............batch_normalization_88
...............vars
..................0
..................1
..................2
............batch_normalization_89
...............vars
..................0
..................1
..................2
............batch_normalization_9
...............vars
..................0
..................1
..................2
............batch_normalization_90
...............vars
..................0
..................1
..................2
............batch_normalization_91
...............vars
..................0
..................1
..................2
............batch_normalization_92
...............vars
..................0
..................1
..................2
............batch_normalization_93
...............vars
..................0
..................1
..................2
............batch_normalization_94
...............vars
..................0
..................1
..................2
............batch_normalization_95
...............vars
..................0
..................1
..................2
............batch_normalization_96
...............vars
..................0
..................1
..................2
............batch_normalization_97
...............vars
..................0
..................1
..................2
............batch_normalization_98
...............vars
..................0
..................1
..................2
............batch_normalization_99
...............vars
..................0
..................1
..................2
............concatenate
...............vars
............concatenate_1
...............vars
............concatenate_10
...............vars
............concatenate_11
...............vars
............concatenate_12
...............vars
............concatenate_13
...............vars
............concatenate_14
...............vars
............concatenate_15
...............vars
............concatenate_16
...............vars
............concatenate_17
...............vars
............concatenate_18
...............vars
............concatenate_19
...............vars
............concatenate_2
...............vars
............concatenate_20
...............vars
............concatenate_21
...............vars
............concatenate_22
...............vars
............concatenate_23
...............vars
............concatenate_24
...............vars
............concatenate_25
...............vars
............concatenate_26
...............vars
............concatenate_27
...............vars
............concatenate_28
...............vars
............concatenate_29
...............vars
............concatenate_3
...............vars
............concatenate_30
...............vars
............concatenate_31
...............vars
............concatenate_32
...............vars
............concatenate_33
...............vars
............concatenate_34
...............vars
............concatenate_35
...............vars
............concatenate_36
...............vars
............concatenate_37
...............vars
............concatenate_38
...............vars
............concatenate_39
...............vars
............concatenate_4
...............vars
............concatenate_40
...............vars
............concatenate_41
...............vars
............concatenate_42
...............vars
............concatenate_5
...............vars
............concatenate_6
...............vars
............concatenate_7
...............vars
............concatenate_8
...............vars
............concatenate_9
...............vars
............conv2d
...............vars
..................0
............conv2d_1
...............vars
..................0
............conv2d_10
...............vars
..................0
............conv2d_100
...............vars
..................0
..................1
............conv2d_101
...............vars
..................0
............conv2d_102
...............vars
..................0
............conv2d_103
...............vars
..................0
............conv2d_104
...............vars
..................0
............conv2d_105
...............vars
..................0
..................1
............conv2d_106
...............vars
..................0
............conv2d_107
...............vars
..................0
............conv2d_108
...............vars
..................0
............conv2d_109
...............vars
..................0
............conv2d_11
...............vars
..................0
............conv2d_110
...............vars
..................0
..................1
............conv2d_111
...............vars
..................0
............conv2d_112
...............vars
..................0
............conv2d_113
...............vars
..................0
............conv2d_114
...............vars
..................0
............conv2d_115
...............vars
..................0
..................1
............conv2d_116
...............vars
..................0
............conv2d_117
...............vars
..................0
............conv2d_118
...............vars
..................0
............conv2d_119
...............vars
..................0
............conv2d_12
...............vars
..................0
............conv2d_120
...............vars
..................0
..................1
............conv2d_121
...............vars
..................0
............conv2d_122
...............vars
..................0
............conv2d_123
...............vars
..................0
............conv2d_124
...............vars
..................0
............conv2d_125
...............vars
..................0
..................1
............conv2d_126
...............vars
..................0
............conv2d_127
...............vars
..................0
............conv2d_128
...............vars
..................0
............conv2d_129
...............vars
..................0
............conv2d_13
...............vars
..................0
............conv2d_130
...............vars
..................0
..................1
............conv2d_131
...............vars
..................0
............conv2d_132
...............vars
..................0
............conv2d_133
...............vars
..................0
............conv2d_134
...............vars
..................0
............conv2d_135
...............vars
..................0
..................1
............conv2d_136
...............vars
..................0
............conv2d_137
...............vars
..................0
............conv2d_138
...............vars
..................0
............conv2d_139
...............vars
..................0
............conv2d_14
...............vars
..................0
............conv2d_140
...............vars
..................0
..................1
............conv2d_141
...............vars
..................0
............conv2d_142
...............vars
..................0
............conv2d_143
...............vars
..................0
............conv2d_144
...............vars
..................0
............conv2d_145
...............vars
..................0
..................1
............conv2d_146
...............vars
..................0
............conv2d_147
...............vars
..................0
............conv2d_148
...............vars
..................0
............conv2d_149
...............vars
..................0
............conv2d_15
...............vars
..................0
............conv2d_150
...............vars
..................0
..................1
............conv2d_151
...............vars
..................0
............conv2d_152
...............vars
..................0
............conv2d_153
...............vars
..................0
............conv2d_154
...............vars
..................0
............conv2d_155
...............vars
..................0
..................1
............conv2d_156
...............vars
..................0
............conv2d_157
...............vars
..................0
............conv2d_158
...............vars
..................0
............conv2d_159
...............vars
..................0
............conv2d_16
...............vars
..................0
............conv2d_160
...............vars
..................0
..................1
............conv2d_161
...............vars
..................0
............conv2d_162
...............vars
..................0
............conv2d_163
...............vars
..................0
............conv2d_164
...............vars
..................0
............conv2d_165
...............vars
..................0
..................1
............conv2d_166
...............vars
..................0
............conv2d_167
...............vars
..................0
............conv2d_168
...............vars
..................0
............conv2d_169
...............vars
..................0
............conv2d_17
...............vars
..................0
............conv2d_170
...............vars
..................0
..................1
............conv2d_171
...............vars
..................0
............conv2d_172
...............vars
..................0
............conv2d_173
...............vars
..................0
............conv2d_174
...............vars
..................0
............conv2d_175
...............vars
..................0
..................1
............conv2d_176
...............vars
..................0
............conv2d_177
...............vars
..................0
............conv2d_178
...............vars
..................0
............conv2d_179
...............vars
..................0
............conv2d_18
...............vars
..................0
..................1
............conv2d_180
...............vars
..................0
..................1
............conv2d_181
...............vars
..................0
............conv2d_182
...............vars
..................0
............conv2d_183
...............vars
..................0
............conv2d_184
...............vars
..................0
............conv2d_185
...............vars
..................0
..................1
............conv2d_186
...............vars
..................0
............conv2d_187
...............vars
..................0
............conv2d_188
...............vars
..................0
............conv2d_189
...............vars
..................0
............conv2d_19
...............vars
..................0
............conv2d_190
...............vars
..................0
............conv2d_191
...............vars
..................0
............conv2d_192
...............vars
..................0
............conv2d_193
...............vars
..................0
............conv2d_194
...............vars
..................0
............conv2d_195
...............vars
..................0
............conv2d_196
...............vars
..................0
............conv2d_197
...............vars
..................0
..................1
............conv2d_198
...............vars
..................0
............conv2d_199
...............vars
..................0
............conv2d_2
...............vars
..................0
............conv2d_20
...............vars
..................0
............conv2d_200
...............vars
..................0
............conv2d_201
...............vars
..................0
............conv2d_202
...............vars
..................0
..................1
............conv2d_203
...............vars
..................0
............conv2d_204
...............vars
..................0
............conv2d_205
...............vars
..................0
............conv2d_206
...............vars
..................0
............conv2d_207
...............vars
..................0
..................1
............conv2d_208
...............vars
..................0
............conv2d_209
...............vars
..................0
............conv2d_21
...............vars
..................0
............conv2d_210
...............vars
..................0
............conv2d_211
...............vars
..................0
............conv2d_212
...............vars
..................0
..................1
............conv2d_213
...............vars
..................0
............conv2d_214
...............vars
..................0
............conv2d_215
...............vars
..................0
............conv2d_216
...............vars
..................0
............conv2d_217
...............vars
..................0
..................1
............conv2d_218
...............vars
..................0
............conv2d_219
...............vars
..................0
............conv2d_22
...............vars
..................0
............conv2d_220
...............vars
..................0
............conv2d_221
...............vars
..................0
............conv2d_222
...............vars
..................0
..................1
............conv2d_223
...............vars
..................0
............conv2d_224
...............vars
..................0
............conv2d_225
...............vars
..................0
............conv2d_226
...............vars
..................0
............conv2d_227
...............vars
..................0
..................1
............conv2d_228
...............vars
..................0
............conv2d_229
...............vars
..................0
............conv2d_23
...............vars
..................0
............conv2d_230
...............vars
..................0
............conv2d_231
...............vars
..................0
............conv2d_232
...............vars
..................0
..................1
............conv2d_233
...............vars
..................0
............conv2d_234
...............vars
..................0
............conv2d_235
...............vars
..................0
............conv2d_236
...............vars
..................0
............conv2d_237
...............vars
..................0
..................1
............conv2d_238
...............vars
..................0
............conv2d_239
...............vars
..................0
............conv2d_24
...............vars
..................0
............conv2d_240
...............vars
..................0
............conv2d_241
...............vars
..................0
............conv2d_242
...............vars
..................0
..................1
............conv2d_243
...............vars
..................0
............conv2d_25
...............vars
..................0
..................1
............conv2d_26
...............vars
..................0
............conv2d_27
...............vars
..................0
............conv2d_28
...............vars
..................0
............conv2d_29
...............vars
..................0
............conv2d_3
...............vars
..................0
............conv2d_30
...............vars
..................0
............conv2d_31
...............vars
..................0
............conv2d_32
...............vars
..................0
..................1
............conv2d_33
...............vars
..................0
............conv2d_34
...............vars
..................0
............conv2d_35
...............vars
..................0
............conv2d_36
...............vars
..................0
............conv2d_37
...............vars
..................0
............conv2d_38
...............vars
..................0
............conv2d_39
...............vars
..................0
..................1
............conv2d_4
...............vars
..................0
............conv2d_40
...............vars
..................0
............conv2d_41
...............vars
..................0
............conv2d_42
...............vars
..................0
............conv2d_43
...............vars
..................0
............conv2d_44
...............vars
..................0
............conv2d_45
...............vars
..................0
............conv2d_46
...............vars
..................0
..................1
............conv2d_47
...............vars
..................0
............conv2d_48
...............vars
..................0
............conv2d_49
...............vars
..................0
............conv2d_5
...............vars
..................0
............conv2d_50
...............vars
..................0
............conv2d_51
...............vars
..................0
............conv2d_52
...............vars
..................0
............conv2d_53
...............vars
..................0
..................1
............conv2d_54
...............vars
..................0
............conv2d_55
...............vars
..................0
............conv2d_56
...............vars
..................0
............conv2d_57
...............vars
..................0
............conv2d_58
...............vars
..................0
............conv2d_59
...............vars
..................0
............conv2d_6
...............vars
..................0
............conv2d_60
...............vars
..................0
..................1
............conv2d_61
...............vars
..................0
............conv2d_62
...............vars
..................0
............conv2d_63
...............vars
..................0
............conv2d_64
...............vars
..................0
............conv2d_65
...............vars
..................0
............conv2d_66
...............vars
..................0
............conv2d_67
...............vars
..................0
..................1
............conv2d_68
...............vars
..................0
............conv2d_69
...............vars
..................0
............conv2d_7
...............vars
..................0
............conv2d_70
...............vars
..................0
............conv2d_71
...............vars
..................0
............conv2d_72
...............vars
..................0
............conv2d_73
...............vars
..................0
............conv2d_74
...............vars
..................0
..................1
............conv2d_75
...............vars
..................0
............conv2d_76
...............vars
..................0
............conv2d_77
...............vars
..................0
............conv2d_78
...............vars
..................0
............conv2d_79
...............vars
..................0
............conv2d_8
...............vars
..................0
............conv2d_80
...............vars
..................0
............conv2d_81
...............vars
..................0
..................1
............conv2d_82
...............vars
..................0
............conv2d_83
...............vars
..................0
............conv2d_84
...............vars
..................0
............conv2d_85
...............vars
..................0
............conv2d_86
...............vars
..................0
............conv2d_87
...............vars
..................0
............conv2d_88
...............vars
..................0
............conv2d_89
...............vars
..................0
............conv2d_9
...............vars
..................0
............conv2d_90
...............vars
..................0
..................1
............conv2d_91
...............vars
..................0
............conv2d_92
...............vars
..................0
............conv2d_93
...............vars
..................0
............conv2d_94
...............vars
..................0
............conv2d_95
...............vars
..................0
..................1
............conv2d_96
...............vars
..................0
............conv2d_97
...............vars
..................0
............conv2d_98
...............vars
..................0
............conv2d_99
...............vars
..................0
............input_layer
...............vars
............lambda
...............vars
............lambda_1
...............vars
............lambda_10
...............vars
............lambda_11
...............vars
............lambda_12
...............vars
............lambda_13
...............vars
............lambda_14
...............vars
............lambda_15
...............vars
............lambda_16
...............vars
............lambda_17
...............vars
............lambda_18
...............vars
............lambda_19
...............vars
............lambda_2
...............vars
............lambda_20
...............vars
............lambda_21
...............vars
............lambda_22
...............vars
............lambda_23
...............vars
............lambda_24
...............vars
............lambda_25
...............vars
............lambda_26
...............vars
............lambda_27
...............vars
............lambda_28
...............vars
............lambda_29
...............vars
............lambda_3
...............vars
............lambda_30
...............vars
............lambda_31
...............vars
............lambda_32
...............vars
............lambda_33
...............vars
............lambda_34
...............vars
............lambda_35
...............vars
............lambda_36
...............vars
............lambda_37
...............vars
............lambda_38
...............vars
............lambda_39
...............vars
............lambda_4
...............vars
............lambda_5
...............vars
............lambda_6
...............vars
............lambda_7
...............vars
............lambda_8
...............vars
............lambda_9
...............vars
............max_pooling2d
...............vars
............max_pooling2d_1
...............vars
............max_pooling2d_2
...............vars
............max_pooling2d_3
...............vars
.........vars
...metrics
......mean
.........vars
............0
............1
......mean_metric_wrapper
.........vars
............0
............1
...optimizer
......vars
.........0
.........1
.........10
.........11
.........12
.........2
.........3
.........4
.........5
.........6
.........7
.........8
.........9
...vars
668/668 [==============================] - 43s 47ms/step
precision recall f1-score support
0 1.00 0.93 0.96 5174
1 0.97 0.96 0.96 9632
2 0.92 1.00 0.96 6541
accuracy 0.96 21347
macro avg 0.96 0.96 0.96 21347
weighted avg 0.96 0.96 0.96 21347
167/167 [==============================] - 8s 50ms/step
precision recall f1-score support
0 0.56 0.43 0.48 1554
1 0.59 0.58 0.58 2405
2 0.63 0.81 0.71 1378
accuracy 0.59 5337
macro avg 0.59 0.60 0.59 5337
weighted avg 0.59 0.59 0.58 5337
# Collate the saved classification-model results and display them.
from IPython.display import display, HTML

# Reload the persisted results sheet, keep only the most recently saved
# row per model, and rank models by test-set overall accuracy.
df_results = (pd.read_excel(images_path + 'df_results.xlsx')
              .drop_duplicates(subset=['Model'], keep='last')
              .sort_values(by=['Ts_Acc Overall '], ascending=False))

# Summary view (accuracy columns only), then the full results table.
summary_cols = ['Model', 'Tr_Acc Overall ', 'Ts_Acc Overall ']
display(HTML(df_results[summary_cols].to_html(index=False)))
display(HTML(df_results.to_html(index=False)))
| Model | Tr_Acc Overall | Ts_Acc Overall |
|---|---|---|
| TL Resnet50 | 0.980138 | 0.642496 |
| TL Vgg16 | 0.982480 | 0.620199 |
| TL InceptionV2 | 0.961212 | 0.592655 |
| CNN Tuned | 0.977514 | 0.581413 |
| CNN Lenet | 1.000000 | 0.578790 |
| Model | Tr_Acc Overall | Tr_Acc Lung Opacity | Tr_Acc No Lung Opacity / Not Normal | Tr_Acc Normal | Ts_Acc Overall | Ts_Acc Lung Opacity | Ts_Acc No Lung Opacity / Not Normal | Ts_Acc Normal |
|---|---|---|---|---|---|---|---|---|
| TL Resnet50 | 0.980138 | 0.930619 | 0.993654 | 0.995762 | 0.642496 | 0.353088 | 0.685545 | 0.780598 |
| TL Vgg16 | 0.982480 | 0.999377 | 0.987520 | 0.964255 | 0.620199 | 0.592654 | 0.596365 | 0.670615 |
| TL InceptionV2 | 0.961212 | 0.995015 | 0.972924 | 0.922577 | 0.592655 | 0.555092 | 0.585799 | 0.627186 |
| CNN Tuned | 0.977514 | 0.941836 | 0.998414 | 0.973863 | 0.581413 | 0.281302 | 0.726965 | 0.589961 |
| CNN Lenet | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 0.578790 | 0.407346 | 0.585376 | 0.685843 |
# What we can try:
# 1. selective-search ref. RCNN.ipynb,
# 2. faster RCNN with gluoncv library - ref Faster_RCNN_Resnet.ipynb,
# 3. UNET/segmentation -> do with a binary mask image as the output (no confidence and BB's as expected in output file)
#1. RCNN - selective search
import random
def calculate_iou(bb_1, bb_2):
    '''
    Compute the Intersection over Union (IoU) of two axis-aligned boxes.

    Each box is a dict with keys 'x1', 'y1' (top-left corner) and
    'x2', 'y2' (bottom-right corner). IoU is the area of the overlap
    divided by the area of the union, and is used here to grade
    selective-search region proposals against the ground-truth box.
    Returns a float in [0.0, 1.0].
    '''
    # Sanity-check that each box has positive width and height;
    # a violated assert raises AssertionError.
    for box in (bb_1, bb_2):
        assert box['x1'] < box['x2']
        assert box['y1'] < box['y2']
    # Corners of the overlap rectangle.
    overlap_left = max(bb_1['x1'], bb_2['x1'])
    overlap_top = max(bb_1['y1'], bb_2['y1'])
    overlap_right = min(bb_1['x2'], bb_2['x2'])
    overlap_bottom = min(bb_1['y2'], bb_2['y2'])
    # Boxes that do not overlap have zero IoU by definition.
    if overlap_right < overlap_left or overlap_bottom < overlap_top:
        return 0.0
    overlap_area = (overlap_right - overlap_left) * (overlap_bottom - overlap_top)
    area_1 = (bb_1['x2'] - bb_1['x1']) * (bb_1['y2'] - bb_1['y1'])
    area_2 = (bb_2['x2'] - bb_2['x1']) * (bb_2['y2'] - bb_2['y1'])
    iou_value = overlap_area / float(area_1 + area_2 - overlap_area)
    # By construction the ratio must land in [0, 1].
    assert 0.0 <= iou_value <= 1.0
    return iou_value
# Demo: run OpenCV selective search on one positive sample and count the
# region proposals that overlap the ground-truth box with IoU > 0.5.
MAX_REGION_PROPOSALS = 2000  # only score the first 2000 proposals

cv2.setUseOptimized(True)
ss_object = cv2.ximgproc.segmentation.createSelectiveSearchSegmentation()

# Sample index with a positive label; 9563 was picked earlier via
# random.choice(Y_dash[Y_dash[0] == 1].index).
num = 9563

# Selective search needs a 3-channel image, so replicate the single plane.
input_image = X[num]
input_image = np.repeat(input_image[..., np.newaxis], 3, -1)
input_image = np.squeeze(input_image)

# Ground-truth box (x, y, w, h), scaled from the original 1024x1024
# resolution down to the 256x256 images we actually work with.
lt = img_box_coordinates_from_idx(num)
factor = 256 / 1024
value = {"x1": int(lt[0] * factor),
         "x2": int((lt[0] + lt[2]) * factor),
         "y1": int(lt[1] * factor),
         "y2": int((lt[1] + lt[3]) * factor)}

# Display the ground-truth bounding box. Draw on a COPY: cv2.rectangle
# modifies its input array in place, so drawing directly on X[num]
# would permanently corrupt the dataset image.
print("Original Image ", num)
img = X[num].copy()
img = cv2.rectangle(img,
                    (value['x1'], value['y1']),
                    (value['x2'], value['y2']),
                    (255, 0, 255), 1, cv2.LINE_AA)
cv2_imshow(img)

# Run selective search (quality mode: slower but higher recall) and keep
# the proposals whose IoU with the truth box exceeds 0.5.
ss_object.setBaseImage(input_image)
ss_object.switchToSelectiveSearchQuality()
results = ss_object.process()  # candidate ROIs as (x, y, w, h) rects

new_input_image = input_image.copy()
count = 0
iou_lst = []
for region, rect in enumerate(results):
    if region < MAX_REGION_PROPOSALS:  # ignore proposals beyond the cap
        x, y, w, h = rect
        iou = calculate_iou(value, {"x1": x, "x2": x + w, "y1": y, "y2": y + h})
        iou_lst.append(iou)
        if iou > 0.50:
            count = count + 1
            cv2.rectangle(new_input_image, (x, y), (x + w, y + h), (0, 255, 0), 1, cv2.LINE_AA)
cv2_imshow(new_input_image)
print("No. of BBs: ", count, ", iou ", np.max(iou_lst), ", regions ", region)
# sns.histplot(iou_lst)
#Most of the img's show no BB's from selective search.
#595 showed 4 BB's, 21928 showed 1 BB, 9563 showed 4, 18466 showed 2, 24826 showed 1, 3325 showed 3 :)
Original Image 9563
No. of BBs: 4 , iou 0.5742065421209807 , regions 313
#2: faster RCNN - gluoncv
# Approach 2: try an off-the-shelf pretrained Faster R-CNN (gluoncv/mxnet) on one frame.
!pip install gluoncv
!pip install mxnet
import mxnet as mx
import gluoncv
from gluoncv import model_zoo, data, utils
# get pretrained Faster RCNN with Resnet50 as base model v3 model from model_zoo class of gluoncv
net = model_zoo.get_model('faster_rcnn_resnet50_v1b_voc', pretrained=True)
final_array = mx.nd.array(X_train_rgb[0]) # convert the video frame into mxnet numpy array
x, img = gluoncv.data.transforms.presets.rcnn.transform_test(final_array) # transform the array for model input shape
# print(type(final_array))
class_IDs, scores, bounding_boxes = net(x) # get the class Ids, confidence scores and bounding boxes by passing the array through the model
ax = utils.viz.cv_plot_bbox(img, bounding_boxes[0], scores[0],
class_IDs[0], class_names=net.classes) # create an axis object with the information and respective classes
# Inspect the raw detector outputs (types and shapes), then the single
# highest-confidence detection and the VOC class vocabulary.
print("BB", type(bounding_boxes[0]))
print("scores", type(scores[0]))
print("class_IDs", type(class_IDs[0]))
print(bounding_boxes[0].shape)
print(scores[0].shape)
print(class_IDs[0].shape)
display(scores[0][np.argmax(scores[0])])
display(class_IDs[0][np.argmax(scores[0])])
display(bounding_boxes[0][np.argmax(scores[0])])
display(net.classes)
display(net.classes[14])
cv2_imshow(ax)
# Display the resulting image
# plt.rcParams['figure.figsize'] = [80, 50]
# plt.show(block=False)
# plt.pause(0.0001)
# plt.close()
#what we will get in the class is 'person'.. We'll have to create a new class and retrain for our use-case.
#not proceeding with this method.
Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/ Requirement already satisfied: gluoncv in /usr/local/lib/python3.9/dist-packages (0.10.5.post0) Requirement already satisfied: Pillow in /usr/local/lib/python3.9/dist-packages (from gluoncv) (8.4.0) Requirement already satisfied: opencv-python in /usr/local/lib/python3.9/dist-packages (from gluoncv) (4.6.0.66) Requirement already satisfied: yacs in /usr/local/lib/python3.9/dist-packages (from gluoncv) (0.1.8) Requirement already satisfied: tqdm in /usr/local/lib/python3.9/dist-packages (from gluoncv) (4.65.0) Requirement already satisfied: scipy in /usr/local/lib/python3.9/dist-packages (from gluoncv) (1.10.1) Requirement already satisfied: matplotlib in /usr/local/lib/python3.9/dist-packages (from gluoncv) (3.5.3) Requirement already satisfied: numpy in /usr/local/lib/python3.9/dist-packages (from gluoncv) (1.22.4) Requirement already satisfied: pandas in /usr/local/lib/python3.9/dist-packages (from gluoncv) (1.4.4) Requirement already satisfied: portalocker in /usr/local/lib/python3.9/dist-packages (from gluoncv) (2.7.0) Requirement already satisfied: requests in /usr/local/lib/python3.9/dist-packages (from gluoncv) (2.25.1) Requirement already satisfied: autocfg in /usr/local/lib/python3.9/dist-packages (from gluoncv) (0.0.8) Requirement already satisfied: pyyaml in /usr/local/lib/python3.9/dist-packages (from gluoncv) (6.0) Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gluoncv) (0.11.0) Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gluoncv) (4.39.0) Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gluoncv) (2.8.2) Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gluoncv) (23.0) Requirement already satisfied: pyparsing>=2.2.1 
in /usr/local/lib/python3.9/dist-packages (from matplotlib->gluoncv) (3.0.9) Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.9/dist-packages (from matplotlib->gluoncv) (1.4.4) Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.9/dist-packages (from pandas->gluoncv) (2022.7.1) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.9/dist-packages (from requests->gluoncv) (2022.12.7) Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.9/dist-packages (from requests->gluoncv) (4.0.0) Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests->gluoncv) (2.10) Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests->gluoncv) (1.26.15) Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.9/dist-packages (from python-dateutil>=2.7->matplotlib->gluoncv) (1.15.0) Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/ Requirement already satisfied: mxnet in /usr/local/lib/python3.9/dist-packages (1.9.1) Requirement already satisfied: graphviz<0.9.0,>=0.8.1 in /usr/local/lib/python3.9/dist-packages (from mxnet) (0.8.4) Requirement already satisfied: numpy<2.0.0,>1.16.0 in /usr/local/lib/python3.9/dist-packages (from mxnet) (1.22.4) Requirement already satisfied: requests<3,>=2.20.0 in /usr/local/lib/python3.9/dist-packages (from mxnet) (2.25.1) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.9/dist-packages (from requests<3,>=2.20.0->mxnet) (2022.12.7) Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.9/dist-packages (from requests<3,>=2.20.0->mxnet) (4.0.0) Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests<3,>=2.20.0->mxnet) (1.26.15) Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.9/dist-packages (from 
requests<3,>=2.20.0->mxnet) (2.10) BB <class 'mxnet.ndarray.ndarray.NDArray'> scores <class 'mxnet.ndarray.ndarray.NDArray'> class_IDs <class 'mxnet.ndarray.ndarray.NDArray'> (6000, 4) (6000, 1) (6000, 1)
[[0.49974477]] <NDArray 1x1 @cpu(0)>
[[14.]] <NDArray 1x1 @cpu(0)>
[[ 26.448029 1.5422974 596.3834 604.8087 ]] <NDArray 1x4 @cpu(0)>
('aeroplane',
'bicycle',
'bird',
'boat',
'bottle',
'bus',
'car',
'cat',
'chair',
'cow',
'diningtable',
'dog',
'horse',
'motorbike',
'person',
'pottedplant',
'sheep',
'sofa',
'train',
'tvmonitor')
'person'
#3 UNET
#writing a helper function to get the box coordinates
def img_box_coordinates_from_idx(idx):
    """Look up the annotated box values (x, y, w, h) for the image at index *idx*.

    The filename list `fn` and the annotation frame `df1A` are module-level;
    the DICOM file stem is the `patientId` key into the annotations.
    """
    patient_id = os.path.splitext(fn[idx])[0]
    matching_rows = df1A[df1A['patientId'] == patient_id]
    return matching_rows.iloc[0][1:5]
def img_box_coordinates (x, y, w, h):
    """Convert a box given as (x, y, w, h) into corner representations.

    Returns a tuple (points, coord):
      points -- [4x2 ndarray] of the four corners (UL, BL, BR, UR)
      coord  -- [[upper-left, bottom-right]] pair
    Both lists are empty when x is NaN (image has no annotated box).
    """
    points = []
    coord = []
    # BUG FIX: the original guard was `x == np.NaN`, which is ALWAYS False
    # (NaN never compares equal to anything), so NaN rows fell through and
    # crashed on int(NaN). Use an explicit NaN test instead.
    if np.isnan(x):
        return points, coord
    x1, y1 = int(x), int(y)          # upper left
    x2, y2 = int(x), int(y + h)      # bottom left
    x3, y3 = int(x + w), int(y + h)  # bottom right
    x4, y4 = int(x + w), int(y)      # upper right
    points.append(np.array([[x1, y1], [x2, y2], [x3, y3], [x4, y4]]))
    coord.append([[x1, y1], [x3, y3]])
    return points, coord
# Setup input/target. Input is fine; the target must be a binary mask image.
# The masks were generated once (one 256x256 mask per xray, annotation boxes
# scaled down from the original 1024x1024 resolution, box pixels set to 255)
# and pickled to 'outfile_Y_mask', so here we only reload them.
with open(images_path + 'outfile_Y_mask', 'rb') as fp:
    Y_mask = pickle.load(fp)
# The pickle holds a list of per-image arrays; stack into one (N, 256, 256) array.
Y_mask = np.stack(Y_mask, axis=0)
# Scale 0/255 masks down to 0/1 to match the UNET's sigmoid output range.
Y_mask = Y_mask / 255
# Visual sanity check: overlay the scaled ground-truth box on sample 10 and
# show the corresponding binary mask next to it.
check_img = np.copy(X[10])
box = img_box_coordinates_from_idx(10)
scale = 256 / 1024  # annotations are in the original 1024x1024 resolution, images resized to 256x256
corners = {"x1": int(box[0] * scale),
           "x2": int((box[0] + box[2]) * scale),
           "y1": int(box[1] * scale),
           "y2": int((box[1] + box[3]) * scale)}
check_img = cv2.rectangle(check_img,
                          (corners['x1'], corners['y1']),
                          (corners['x2'], corners['y2']),
                          (255, 0, 255), 1, cv2.LINE_AA)
cv2_imshow(check_img)
cv2_imshow(Y_mask[10] * 255)
#Setup the UNET model
# The MobileNetV2 encoder expects 3-channel input: tile the single grayscale
# channel of X three times along a new trailing axis.
X_rgb = np.squeeze(np.repeat(X[..., np.newaxis], 3, -1))
def unet_MN_based_model():
    """Build a UNET-style segmentation model on a MobileNetV2 (alpha=0.35) encoder.

    Input is a (256, 256, 3) image; output is a (256, 256, 1) sigmoid mask.
    The decoder upsamples from 'block_13_expand_relu' and concatenates the
    encoder's skip connections, deepest first.
    """
    inputs = Input(shape=(256, 256, 3), name='input_image')
    encoder = MobileNetV2(input_tensor=inputs, weights='imagenet', include_top=False, alpha=0.35)
    skip_connection_names = ['input_image','block_1_expand_relu','block_3_expand_relu','block_6_expand_relu']
    decoder_filters = [16, 32, 48, 64]
    x = encoder.get_layer('block_13_expand_relu').output
    # Walk the skips deepest-first, pairing each with its decoder filter count.
    for skip_name, n_filters in zip(reversed(skip_connection_names), reversed(decoder_filters)):
        skip_output = encoder.get_layer(skip_name).output
        x = UpSampling2D((2, 2))(x)
        x = Concatenate()([x, skip_output])
        # Two conv -> BN -> ReLU stages per decoder level.
        for _ in range(2):
            x = Conv2D(n_filters, (3, 3), padding="same")(x)
            x = BatchNormalization()(x)
            x = Activation("relu")(x)
    # 1x1 conv + sigmoid gives the per-pixel binary segmentation mask.
    x = Conv2D(1, (1, 1), padding="same")(x)
    x = Activation("sigmoid")(x)
    return Model(inputs, x)
# Build the MobileNetV2-based UNET and print its layer summary.
model_unet = unet_MN_based_model()
model_unet.summary()
def dice_coefficient(y_true, y_pred):
    """Soft Dice overlap between true and predicted masks (1.0 = perfect).

    Computed as 2*|A.B| / (|A|+|B|); epsilon in the denominator guards
    against division by zero for all-empty masks.
    """
    intersection = tf.reduce_sum(y_true * y_pred)
    mass = tf.reduce_sum(y_true + y_pred)
    return 2 * intersection / (mass + tf.keras.backend.epsilon())
# def loss(y_true, y_pred):
# return binary_crossentropy(y_true, y_pred) #- log(dice_coefficient(y_true, y_pred) + epsilon())
# Compile with plain binary cross-entropy; dice coefficient tracked as a metric.
model_unet.compile(optimizer='Adam', loss=binary_crossentropy, metrics=[dice_coefficient])
# Callbacks: keep the best weights, stop when loss plateaus, and decay the LR.
checkpoint = ModelCheckpoint("model-{loss:.2f}.h5", monitor="loss", verbose=1, save_best_only=True,
save_weights_only=True, mode="min")
stop = EarlyStopping(monitor="loss", patience=5, mode="min")
reduce_lr = ReduceLROnPlateau(monitor="loss", factor=0.2, patience=5, min_lr=1e-6, verbose=1, mode="min")
#fit the model
# BUG FIX: the three callbacks above were created but never passed to fit(),
# so checkpointing, early stopping, and LR reduction silently did nothing.
model_unet.fit(X_rgb, Y_mask, batch_size=32, epochs=25, validation_split=0.1,
callbacks=[checkpoint, stop, reduce_lr])
WARNING:tensorflow:`input_shape` is undefined or non-square, or `rows` is not in [96, 128, 160, 192, 224]. Weights for input shape (224, 224) will be loaded as the default.
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/mobilenet_v2/mobilenet_v2_weights_tf_dim_ordering_tf_kernels_0.35_224_no_top.h5
2019640/2019640 [==============================] - 1s 1us/step
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_image (InputLayer) [(None, 256, 256, 3 0 []
)]
Conv1 (Conv2D) (None, 128, 128, 16 432 ['input_image[0][0]']
)
bn_Conv1 (BatchNormalization) (None, 128, 128, 16 64 ['Conv1[0][0]']
)
Conv1_relu (ReLU) (None, 128, 128, 16 0 ['bn_Conv1[0][0]']
)
expanded_conv_depthwise (Depth (None, 128, 128, 16 144 ['Conv1_relu[0][0]']
wiseConv2D) )
expanded_conv_depthwise_BN (Ba (None, 128, 128, 16 64 ['expanded_conv_depthwise[0][0]']
tchNormalization) )
expanded_conv_depthwise_relu ( (None, 128, 128, 16 0 ['expanded_conv_depthwise_BN[0][0
ReLU) ) ]']
expanded_conv_project (Conv2D) (None, 128, 128, 8) 128 ['expanded_conv_depthwise_relu[0]
[0]']
expanded_conv_project_BN (Batc (None, 128, 128, 8) 32 ['expanded_conv_project[0][0]']
hNormalization)
block_1_expand (Conv2D) (None, 128, 128, 48 384 ['expanded_conv_project_BN[0][0]'
) ]
block_1_expand_BN (BatchNormal (None, 128, 128, 48 192 ['block_1_expand[0][0]']
ization) )
block_1_expand_relu (ReLU) (None, 128, 128, 48 0 ['block_1_expand_BN[0][0]']
)
block_1_pad (ZeroPadding2D) (None, 129, 129, 48 0 ['block_1_expand_relu[0][0]']
)
block_1_depthwise (DepthwiseCo (None, 64, 64, 48) 432 ['block_1_pad[0][0]']
nv2D)
block_1_depthwise_BN (BatchNor (None, 64, 64, 48) 192 ['block_1_depthwise[0][0]']
malization)
block_1_depthwise_relu (ReLU) (None, 64, 64, 48) 0 ['block_1_depthwise_BN[0][0]']
block_1_project (Conv2D) (None, 64, 64, 8) 384 ['block_1_depthwise_relu[0][0]']
block_1_project_BN (BatchNorma (None, 64, 64, 8) 32 ['block_1_project[0][0]']
lization)
block_2_expand (Conv2D) (None, 64, 64, 48) 384 ['block_1_project_BN[0][0]']
block_2_expand_BN (BatchNormal (None, 64, 64, 48) 192 ['block_2_expand[0][0]']
ization)
block_2_expand_relu (ReLU) (None, 64, 64, 48) 0 ['block_2_expand_BN[0][0]']
block_2_depthwise (DepthwiseCo (None, 64, 64, 48) 432 ['block_2_expand_relu[0][0]']
nv2D)
block_2_depthwise_BN (BatchNor (None, 64, 64, 48) 192 ['block_2_depthwise[0][0]']
malization)
block_2_depthwise_relu (ReLU) (None, 64, 64, 48) 0 ['block_2_depthwise_BN[0][0]']
block_2_project (Conv2D) (None, 64, 64, 8) 384 ['block_2_depthwise_relu[0][0]']
block_2_project_BN (BatchNorma (None, 64, 64, 8) 32 ['block_2_project[0][0]']
lization)
block_2_add (Add) (None, 64, 64, 8) 0 ['block_1_project_BN[0][0]',
'block_2_project_BN[0][0]']
block_3_expand (Conv2D) (None, 64, 64, 48) 384 ['block_2_add[0][0]']
block_3_expand_BN (BatchNormal (None, 64, 64, 48) 192 ['block_3_expand[0][0]']
ization)
block_3_expand_relu (ReLU) (None, 64, 64, 48) 0 ['block_3_expand_BN[0][0]']
block_3_pad (ZeroPadding2D) (None, 65, 65, 48) 0 ['block_3_expand_relu[0][0]']
block_3_depthwise (DepthwiseCo (None, 32, 32, 48) 432 ['block_3_pad[0][0]']
nv2D)
block_3_depthwise_BN (BatchNor (None, 32, 32, 48) 192 ['block_3_depthwise[0][0]']
malization)
block_3_depthwise_relu (ReLU) (None, 32, 32, 48) 0 ['block_3_depthwise_BN[0][0]']
block_3_project (Conv2D) (None, 32, 32, 16) 768 ['block_3_depthwise_relu[0][0]']
block_3_project_BN (BatchNorma (None, 32, 32, 16) 64 ['block_3_project[0][0]']
lization)
block_4_expand (Conv2D) (None, 32, 32, 96) 1536 ['block_3_project_BN[0][0]']
block_4_expand_BN (BatchNormal (None, 32, 32, 96) 384 ['block_4_expand[0][0]']
ization)
block_4_expand_relu (ReLU) (None, 32, 32, 96) 0 ['block_4_expand_BN[0][0]']
block_4_depthwise (DepthwiseCo (None, 32, 32, 96) 864 ['block_4_expand_relu[0][0]']
nv2D)
block_4_depthwise_BN (BatchNor (None, 32, 32, 96) 384 ['block_4_depthwise[0][0]']
malization)
block_4_depthwise_relu (ReLU) (None, 32, 32, 96) 0 ['block_4_depthwise_BN[0][0]']
block_4_project (Conv2D) (None, 32, 32, 16) 1536 ['block_4_depthwise_relu[0][0]']
block_4_project_BN (BatchNorma (None, 32, 32, 16) 64 ['block_4_project[0][0]']
lization)
block_4_add (Add) (None, 32, 32, 16) 0 ['block_3_project_BN[0][0]',
'block_4_project_BN[0][0]']
block_5_expand (Conv2D) (None, 32, 32, 96) 1536 ['block_4_add[0][0]']
block_5_expand_BN (BatchNormal (None, 32, 32, 96) 384 ['block_5_expand[0][0]']
ization)
block_5_expand_relu (ReLU) (None, 32, 32, 96) 0 ['block_5_expand_BN[0][0]']
block_5_depthwise (DepthwiseCo (None, 32, 32, 96) 864 ['block_5_expand_relu[0][0]']
nv2D)
block_5_depthwise_BN (BatchNor (None, 32, 32, 96) 384 ['block_5_depthwise[0][0]']
malization)
block_5_depthwise_relu (ReLU) (None, 32, 32, 96) 0 ['block_5_depthwise_BN[0][0]']
block_5_project (Conv2D) (None, 32, 32, 16) 1536 ['block_5_depthwise_relu[0][0]']
block_5_project_BN (BatchNorma (None, 32, 32, 16) 64 ['block_5_project[0][0]']
lization)
block_5_add (Add) (None, 32, 32, 16) 0 ['block_4_add[0][0]',
'block_5_project_BN[0][0]']
block_6_expand (Conv2D) (None, 32, 32, 96) 1536 ['block_5_add[0][0]']
block_6_expand_BN (BatchNormal (None, 32, 32, 96) 384 ['block_6_expand[0][0]']
ization)
block_6_expand_relu (ReLU) (None, 32, 32, 96) 0 ['block_6_expand_BN[0][0]']
block_6_pad (ZeroPadding2D) (None, 33, 33, 96) 0 ['block_6_expand_relu[0][0]']
block_6_depthwise (DepthwiseCo (None, 16, 16, 96) 864 ['block_6_pad[0][0]']
nv2D)
block_6_depthwise_BN (BatchNor (None, 16, 16, 96) 384 ['block_6_depthwise[0][0]']
malization)
block_6_depthwise_relu (ReLU) (None, 16, 16, 96) 0 ['block_6_depthwise_BN[0][0]']
block_6_project (Conv2D) (None, 16, 16, 24) 2304 ['block_6_depthwise_relu[0][0]']
block_6_project_BN (BatchNorma (None, 16, 16, 24) 96 ['block_6_project[0][0]']
lization)
block_7_expand (Conv2D) (None, 16, 16, 144) 3456 ['block_6_project_BN[0][0]']
block_7_expand_BN (BatchNormal (None, 16, 16, 144) 576 ['block_7_expand[0][0]']
ization)
block_7_expand_relu (ReLU) (None, 16, 16, 144) 0 ['block_7_expand_BN[0][0]']
block_7_depthwise (DepthwiseCo (None, 16, 16, 144) 1296 ['block_7_expand_relu[0][0]']
nv2D)
block_7_depthwise_BN (BatchNor (None, 16, 16, 144) 576 ['block_7_depthwise[0][0]']
malization)
block_7_depthwise_relu (ReLU) (None, 16, 16, 144) 0 ['block_7_depthwise_BN[0][0]']
block_7_project (Conv2D) (None, 16, 16, 24) 3456 ['block_7_depthwise_relu[0][0]']
block_7_project_BN (BatchNorma (None, 16, 16, 24) 96 ['block_7_project[0][0]']
lization)
block_7_add (Add) (None, 16, 16, 24) 0 ['block_6_project_BN[0][0]',
'block_7_project_BN[0][0]']
block_8_expand (Conv2D) (None, 16, 16, 144) 3456 ['block_7_add[0][0]']
block_8_expand_BN (BatchNormal (None, 16, 16, 144) 576 ['block_8_expand[0][0]']
ization)
block_8_expand_relu (ReLU) (None, 16, 16, 144) 0 ['block_8_expand_BN[0][0]']
block_8_depthwise (DepthwiseCo (None, 16, 16, 144) 1296 ['block_8_expand_relu[0][0]']
nv2D)
block_8_depthwise_BN (BatchNor (None, 16, 16, 144) 576 ['block_8_depthwise[0][0]']
malization)
block_8_depthwise_relu (ReLU) (None, 16, 16, 144) 0 ['block_8_depthwise_BN[0][0]']
block_8_project (Conv2D) (None, 16, 16, 24) 3456 ['block_8_depthwise_relu[0][0]']
block_8_project_BN (BatchNorma (None, 16, 16, 24) 96 ['block_8_project[0][0]']
lization)
block_8_add (Add) (None, 16, 16, 24) 0 ['block_7_add[0][0]',
'block_8_project_BN[0][0]']
block_9_expand (Conv2D) (None, 16, 16, 144) 3456 ['block_8_add[0][0]']
block_9_expand_BN (BatchNormal (None, 16, 16, 144) 576 ['block_9_expand[0][0]']
ization)
block_9_expand_relu (ReLU) (None, 16, 16, 144) 0 ['block_9_expand_BN[0][0]']
block_9_depthwise (DepthwiseCo (None, 16, 16, 144) 1296 ['block_9_expand_relu[0][0]']
nv2D)
block_9_depthwise_BN (BatchNor (None, 16, 16, 144) 576 ['block_9_depthwise[0][0]']
malization)
block_9_depthwise_relu (ReLU) (None, 16, 16, 144) 0 ['block_9_depthwise_BN[0][0]']
block_9_project (Conv2D) (None, 16, 16, 24) 3456 ['block_9_depthwise_relu[0][0]']
block_9_project_BN (BatchNorma (None, 16, 16, 24) 96 ['block_9_project[0][0]']
lization)
block_9_add (Add) (None, 16, 16, 24) 0 ['block_8_add[0][0]',
'block_9_project_BN[0][0]']
block_10_expand (Conv2D) (None, 16, 16, 144) 3456 ['block_9_add[0][0]']
block_10_expand_BN (BatchNorma (None, 16, 16, 144) 576 ['block_10_expand[0][0]']
lization)
block_10_expand_relu (ReLU) (None, 16, 16, 144) 0 ['block_10_expand_BN[0][0]']
block_10_depthwise (DepthwiseC (None, 16, 16, 144) 1296 ['block_10_expand_relu[0][0]']
onv2D)
block_10_depthwise_BN (BatchNo (None, 16, 16, 144) 576 ['block_10_depthwise[0][0]']
rmalization)
block_10_depthwise_relu (ReLU) (None, 16, 16, 144) 0 ['block_10_depthwise_BN[0][0]']
block_10_project (Conv2D) (None, 16, 16, 32) 4608 ['block_10_depthwise_relu[0][0]']
block_10_project_BN (BatchNorm (None, 16, 16, 32) 128 ['block_10_project[0][0]']
alization)
block_11_expand (Conv2D) (None, 16, 16, 192) 6144 ['block_10_project_BN[0][0]']
block_11_expand_BN (BatchNorma (None, 16, 16, 192) 768 ['block_11_expand[0][0]']
lization)
block_11_expand_relu (ReLU) (None, 16, 16, 192) 0 ['block_11_expand_BN[0][0]']
block_11_depthwise (DepthwiseC (None, 16, 16, 192) 1728 ['block_11_expand_relu[0][0]']
onv2D)
block_11_depthwise_BN (BatchNo (None, 16, 16, 192) 768 ['block_11_depthwise[0][0]']
rmalization)
block_11_depthwise_relu (ReLU) (None, 16, 16, 192) 0 ['block_11_depthwise_BN[0][0]']
block_11_project (Conv2D) (None, 16, 16, 32) 6144 ['block_11_depthwise_relu[0][0]']
block_11_project_BN (BatchNorm (None, 16, 16, 32) 128 ['block_11_project[0][0]']
alization)
block_11_add (Add) (None, 16, 16, 32) 0 ['block_10_project_BN[0][0]',
'block_11_project_BN[0][0]']
block_12_expand (Conv2D) (None, 16, 16, 192) 6144 ['block_11_add[0][0]']
block_12_expand_BN (BatchNorma (None, 16, 16, 192) 768 ['block_12_expand[0][0]']
lization)
block_12_expand_relu (ReLU) (None, 16, 16, 192) 0 ['block_12_expand_BN[0][0]']
block_12_depthwise (DepthwiseC (None, 16, 16, 192) 1728 ['block_12_expand_relu[0][0]']
onv2D)
block_12_depthwise_BN (BatchNo (None, 16, 16, 192) 768 ['block_12_depthwise[0][0]']
rmalization)
block_12_depthwise_relu (ReLU) (None, 16, 16, 192) 0 ['block_12_depthwise_BN[0][0]']
block_12_project (Conv2D) (None, 16, 16, 32) 6144 ['block_12_depthwise_relu[0][0]']
block_12_project_BN (BatchNorm (None, 16, 16, 32) 128 ['block_12_project[0][0]']
alization)
block_12_add (Add) (None, 16, 16, 32) 0 ['block_11_add[0][0]',
'block_12_project_BN[0][0]']
block_13_expand (Conv2D) (None, 16, 16, 192) 6144 ['block_12_add[0][0]']
block_13_expand_BN (BatchNorma (None, 16, 16, 192) 768 ['block_13_expand[0][0]']
lization)
block_13_expand_relu (ReLU) (None, 16, 16, 192) 0 ['block_13_expand_BN[0][0]']
up_sampling2d (UpSampling2D) (None, 32, 32, 192) 0 ['block_13_expand_relu[0][0]']
concatenate (Concatenate) (None, 32, 32, 288) 0 ['up_sampling2d[0][0]',
'block_6_expand_relu[0][0]']
conv2d (Conv2D) (None, 32, 32, 64) 165952 ['concatenate[0][0]']
batch_normalization (BatchNorm (None, 32, 32, 64) 256 ['conv2d[0][0]']
alization)
activation (Activation) (None, 32, 32, 64) 0 ['batch_normalization[0][0]']
conv2d_1 (Conv2D) (None, 32, 32, 64) 36928 ['activation[0][0]']
batch_normalization_1 (BatchNo (None, 32, 32, 64) 256 ['conv2d_1[0][0]']
rmalization)
activation_1 (Activation) (None, 32, 32, 64) 0 ['batch_normalization_1[0][0]']
up_sampling2d_1 (UpSampling2D) (None, 64, 64, 64) 0 ['activation_1[0][0]']
concatenate_1 (Concatenate) (None, 64, 64, 112) 0 ['up_sampling2d_1[0][0]',
'block_3_expand_relu[0][0]']
conv2d_2 (Conv2D) (None, 64, 64, 48) 48432 ['concatenate_1[0][0]']
batch_normalization_2 (BatchNo (None, 64, 64, 48) 192 ['conv2d_2[0][0]']
rmalization)
activation_2 (Activation) (None, 64, 64, 48) 0 ['batch_normalization_2[0][0]']
conv2d_3 (Conv2D) (None, 64, 64, 48) 20784 ['activation_2[0][0]']
batch_normalization_3 (BatchNo (None, 64, 64, 48) 192 ['conv2d_3[0][0]']
rmalization)
activation_3 (Activation) (None, 64, 64, 48) 0 ['batch_normalization_3[0][0]']
up_sampling2d_2 (UpSampling2D) (None, 128, 128, 48 0 ['activation_3[0][0]']
)
concatenate_2 (Concatenate) (None, 128, 128, 96 0 ['up_sampling2d_2[0][0]',
) 'block_1_expand_relu[0][0]']
conv2d_4 (Conv2D) (None, 128, 128, 32 27680 ['concatenate_2[0][0]']
)
batch_normalization_4 (BatchNo (None, 128, 128, 32 128 ['conv2d_4[0][0]']
rmalization) )
activation_4 (Activation) (None, 128, 128, 32 0 ['batch_normalization_4[0][0]']
)
conv2d_5 (Conv2D) (None, 128, 128, 32 9248 ['activation_4[0][0]']
)
batch_normalization_5 (BatchNo (None, 128, 128, 32 128 ['conv2d_5[0][0]']
rmalization) )
activation_5 (Activation) (None, 128, 128, 32 0 ['batch_normalization_5[0][0]']
)
up_sampling2d_3 (UpSampling2D) (None, 256, 256, 32 0 ['activation_5[0][0]']
)
concatenate_3 (Concatenate) (None, 256, 256, 35 0 ['up_sampling2d_3[0][0]',
) 'input_image[0][0]']
conv2d_6 (Conv2D) (None, 256, 256, 16 5056 ['concatenate_3[0][0]']
)
batch_normalization_6 (BatchNo (None, 256, 256, 16 64 ['conv2d_6[0][0]']
rmalization) )
activation_6 (Activation) (None, 256, 256, 16 0 ['batch_normalization_6[0][0]']
)
conv2d_7 (Conv2D) (None, 256, 256, 16 2320 ['activation_6[0][0]']
)
batch_normalization_7 (BatchNo (None, 256, 256, 16 64 ['conv2d_7[0][0]']
rmalization) )
activation_7 (Activation) (None, 256, 256, 16 0 ['batch_normalization_7[0][0]']
)
conv2d_8 (Conv2D) (None, 256, 256, 1) 17 ['activation_7[0][0]']
activation_8 (Activation) (None, 256, 256, 1) 0 ['conv2d_8[0][0]']
==================================================================================================
Total params: 416,209
Trainable params: 409,025
Non-trainable params: 7,184
__________________________________________________________________________________________________
Epoch 1/25
751/751 [==============================] - 97s 77ms/step - loss: 0.1724 - dice_coefficient: 0.0703 - val_loss: 0.1104 - val_dice_coefficient: 0.0195
Epoch 2/25
751/751 [==============================] - 52s 70ms/step - loss: 0.0502 - dice_coefficient: 0.1532 - val_loss: 0.0860 - val_dice_coefficient: 0.1121
Epoch 3/25
751/751 [==============================] - 52s 70ms/step - loss: 0.0452 - dice_coefficient: 0.1828 - val_loss: 0.1090 - val_dice_coefficient: 0.0862
Epoch 4/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0436 - dice_coefficient: 0.1978 - val_loss: 0.0885 - val_dice_coefficient: 0.1600
Epoch 5/25
751/751 [==============================] - 53s 70ms/step - loss: 0.0425 - dice_coefficient: 0.2084 - val_loss: 0.1071 - val_dice_coefficient: 0.0965
Epoch 6/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0415 - dice_coefficient: 0.2202 - val_loss: 0.0710 - val_dice_coefficient: 0.1824
Epoch 7/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0405 - dice_coefficient: 0.2372 - val_loss: 0.1217 - val_dice_coefficient: 0.0673
Epoch 8/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0396 - dice_coefficient: 0.2459 - val_loss: 0.0837 - val_dice_coefficient: 0.1340
Epoch 9/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0387 - dice_coefficient: 0.2607 - val_loss: 0.1180 - val_dice_coefficient: 0.0637
Epoch 10/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0375 - dice_coefficient: 0.2801 - val_loss: 0.0716 - val_dice_coefficient: 0.1943
Epoch 11/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0363 - dice_coefficient: 0.3018 - val_loss: 0.0780 - val_dice_coefficient: 0.1488
Epoch 12/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0348 - dice_coefficient: 0.3254 - val_loss: 0.1069 - val_dice_coefficient: 0.1250
Epoch 13/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0336 - dice_coefficient: 0.3496 - val_loss: 0.0783 - val_dice_coefficient: 0.2063
Epoch 14/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0322 - dice_coefficient: 0.3755 - val_loss: 0.0795 - val_dice_coefficient: 0.2303
Epoch 15/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0305 - dice_coefficient: 0.4048 - val_loss: 0.0863 - val_dice_coefficient: 0.2131
Epoch 16/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0292 - dice_coefficient: 0.4288 - val_loss: 0.0884 - val_dice_coefficient: 0.2259
Epoch 17/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0277 - dice_coefficient: 0.4564 - val_loss: 0.0984 - val_dice_coefficient: 0.2114
Epoch 18/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0264 - dice_coefficient: 0.4739 - val_loss: 0.0929 - val_dice_coefficient: 0.2096
Epoch 19/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0253 - dice_coefficient: 0.4981 - val_loss: 0.0948 - val_dice_coefficient: 0.2330
Epoch 20/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0242 - dice_coefficient: 0.5196 - val_loss: 0.1227 - val_dice_coefficient: 0.1571
Epoch 21/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0228 - dice_coefficient: 0.5437 - val_loss: 0.1131 - val_dice_coefficient: 0.2185
Epoch 22/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0223 - dice_coefficient: 0.5503 - val_loss: 0.1446 - val_dice_coefficient: 0.0782
Epoch 23/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0212 - dice_coefficient: 0.5748 - val_loss: 0.1097 - val_dice_coefficient: 0.1779
Epoch 24/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0208 - dice_coefficient: 0.5804 - val_loss: 0.1105 - val_dice_coefficient: 0.2012
Epoch 25/25
751/751 [==============================] - 52s 69ms/step - loss: 0.0200 - dice_coefficient: 0.5961 - val_loss: 0.1017 - val_dice_coefficient: 0.2008
<keras.callbacks.History at 0x7fc422a480a0>
# Predict on a random pneumonia-positive sample: show the xray, the ground
# truth mask, and the UNET's predicted mask with a fitted bounding box.
positive_ids = list(Y_dash[Y_dash[0] == 1].index)
# Pick one pneumonia case at random.
sample_id = rnd.choice(positive_ids)
# Samples 26186, 22362, 22649, 12168, 23746, 21397, 12065, 14911, 17693, 9286
# predicted well; 15808 was faint; 9285 was bad.
print("Xray id", sample_id)
cv2_imshow(X[sample_id])
print("Truth")
cv2_imshow(Y_mask[sample_id] * 255)
print("Prediction")
pred_mask = np.squeeze(model_unet.predict(x=np.array([X_rgb[sample_id]])))
# Convert the sigmoid mask to a 3-channel uint8 image, then back to grayscale
# so it can be thresholded for contour detection.
gray = np.expand_dims(pred_mask * 255, axis=-1)
gray = np.squeeze(np.repeat(gray[..., np.newaxis], 3, -1))
gray = cv2.cvtColor(np.array(gray, np.uint8), cv2.COLOR_BGR2GRAY)
# Binarize the predicted mask and look for contours.
ret, thresh = cv2.threshold(gray, 127, 255, 0)
contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
if len(contours) > 0:
    # Use the first contour's bounding rectangle as the predicted box.
    cnt = contours[0]
    x, y, w, h = cv2.boundingRect(cnt)
    # 3-channel copy of the predicted mask to draw the box on.
    pred_mask_bb = np.squeeze(np.repeat((pred_mask * 255)[..., np.newaxis], 3, -1))
    pred_mask_bb_copy = pred_mask_bb.copy()
    # pred_mask_bb_copy = cv2.drawContours(pred_mask_bb_copy,[cnt],0,(0,255,255),2)
    pred_mask_bb_copy = cv2.rectangle(pred_mask_bb_copy, (x, y), (x + w, y + h), (0, 255, 0), 2)
    cv2_imshow(pred_mask_bb_copy)
    v1, v2, v3, v4 = x, y, w, h
    # Draw the same predicted box on the original xray.
    org_img_copy = X[sample_id].copy()
    cv2.rectangle(org_img_copy, (v1, v2), (v1 + v3, v2 + v4), (0, 255, 0), 1, cv2.LINE_AA)
    print("Predicted Pnemonia image with Bounding Box")
    cv2_imshow(org_img_copy)
else:
    print("No contours detected, no BB")
    cv2_imshow(pred_mask * 255)
Xray id 921
Truth
Prediction 1/1 [==============================] - 0s 28ms/step
Predicted Pnemonia image with Bounding Box
# Persist the trained UNET model to HDF5 (not actually pickled) so it can be
# reloaded later without retraining.
from tensorflow.keras.models import save_model, load_model
save_model(model_unet, images_path + "model_unet.h5")
# pickled_model_unet = load_model(images_path + 'model_unet.h5')
#New sample for prediction - once this works, integrate this to the window/explorer section (next cell)
# End-to-end on a fresh DICOM test image: classify with the saved ResNet50
# model, and if pneumonia is predicted, localize it with the UNET model.
from tensorflow.keras.applications.resnet50 import preprocess_input
from tensorflow.keras.models import save_model, load_model

# Pick a random DICOM from the test set and resize to the models' 256x256 input.
test_dir = images_path + '/stage_2_test_images/'
test_sample = rnd.choice(os.listdir(test_dir))
dicom_file = dicom.dcmread(test_dir + test_sample)
mono = cv2.resize(dicom_file.pixel_array, (256, 256))
cv2_imshow(mono)
# 3-channel copy for the CNNs.
mono_rgb = np.squeeze(np.repeat(mono[..., np.newaxis], 3, -1))
mono_rgb_vgg16 = preprocess_input(mono_rgb)
pickled_model_resnet = load_model(images_path + 'model_TL_Resnet50.h5')
y_pred_mono = pickled_model_resnet.predict(np.array([mono_rgb_vgg16]))
y_pred_final = [np.argmax(i) for i in y_pred_mono]
out_label = ['Pnemonia Detected', 'No Pnemonia', 'No Pnemonia/Enlarged Lungs']
print("Classification/Prediction: ", out_label[y_pred_final[0]])
if y_pred_final[0] == 0:
    print("Detection/Prediction: ")
    # Predicted segmentation mask for the new sample.
    pred_sample = np.squeeze(model_unet.predict(x=np.array([mono_rgb])))
    gray = np.expand_dims(pred_sample * 255, axis=-1)
    gray = np.squeeze(np.repeat(gray[..., np.newaxis], 3, -1))
    gray = cv2.cvtColor(np.array(gray, np.uint8), cv2.COLOR_BGR2GRAY)
    # Threshold the predicted mask and find contours.
    ret, thresh = cv2.threshold(gray, 127, 255, 0)
    contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    if len(contours) > 0:
        # Take the first contour and its bounding rectangle.
        cnt = contours[0]
        x, y, w, h = cv2.boundingRect(cnt)
        pred_sample_bb = np.squeeze(np.repeat((pred_sample * 255)[..., np.newaxis], 3, -1))
        pred_sample_bb_copy = pred_sample_bb.copy()  # copy of the mask image to draw on
        # BUG FIX: this branch previously drew on `pred_mask_bb_copy`, a stale
        # image left over from the earlier cell, instead of the new sample's
        # `pred_sample_bb_copy` built just above.
        pred_sample_bb_copy = cv2.drawContours(pred_sample_bb_copy, [cnt], 0, (0, 255, 255), 2)
        cv2_imshow(pred_sample_bb_copy)
        v1, v2, v3, v4 = x, y, w, h
        # Draw the predicted box on the original xray.
        org_img_copy = mono.copy()
        cv2.rectangle(org_img_copy, (v1, v2), (v1 + v3, v2 + v4), (0, 255, 0), 1, cv2.LINE_AA)
        print("Predicted Pnemonia image with Bounding Box")
        cv2_imshow(org_img_copy)
    else:
        print("No Contour, No Bounding Box")
        cv2_imshow(pred_sample * 255)
else:
    print("No Detection/Prediction")
1/1 [==============================] - 1s 938ms/step Classification/Prediction: Pnemonia Detected Detection/Prediction: 1/1 [==============================] - 0s 24ms/step
Predicted Pnemonia image with Bounding Box
from tkinter import *
# import filedialog module
from tkinter import filedialog
# !pip3 install pillow-heif
# from PIL import Image
# from pillow_heif import register_heif_opener
from tensorflow.keras.applications.resnet50 import preprocess_input
from tensorflow.keras.models import save_model, load_model
# Holds the path chosen via the file-explorer dialog.
filename = ""

def browseFiles():
    """Open a file dialog and remember the chosen path in the global `filename`."""
    # BUG FIX: without this declaration the assignment below created a LOCAL
    # `filename`, so the module-level `filename` stayed "" for later callers.
    global filename
    filename = filedialog.askopenfilename(initialdir="/",
                                          title="Select a File",
                                          filetypes=(("Text files",
                                                      "*.txt*"),
                                                     ("all files",
                                                      "*.*")))
    # Change label contents
    label_file_explorer.configure(text="File Opened: " + filename)
def ClassifyDetectFile():
    """Classify the test DICOM image for pneumonia and localize positives.

    Pipeline:
      1. Read the DICOM at ``test_dir + test_sample`` and resize to 256x256.
      2. Classify with the saved ResNet50 transfer-learning model (3 classes).
      3. If class 0 ("Pnemonia Detected"), predict a mask with ``model_unet``,
         threshold it, take the first contour, and draw its bounding box on
         the input image.

    Relies on module-level names: ``dicom``, ``cv2``, ``cv2_imshow``,
    ``test_dir``, ``test_sample``, ``images_path``, ``model_unet``.
    NOTE(review): this ignores the path selected via browseFiles() —
    presumably it should load the browsed ``filename`` instead of
    ``test_sample``; confirm intended integration (flagged as incomplete
    by the author).
    """
    dicom_file = dicom.dcmread(test_dir + test_sample)
    mono = cv2.resize(dicom_file.pixel_array, (256,256))
    cv2_imshow(mono)
    # Replicate the single grayscale channel to 3 channels for RGB-input models.
    mono_rgb = np.repeat(mono[..., np.newaxis], 3, -1)
    mono_rgb = np.squeeze(mono_rgb)
    mono_rgb_vgg16 = preprocess_input(mono_rgb)
    pickled_model_resnet = load_model(images_path + 'model_TL_Resnet50.h5')
    y_pred_mono = pickled_model_resnet.predict(np.array([mono_rgb_vgg16]))
    # argmax over each sample's softmax output -> predicted class index
    y_pred_final = [np.argmax(i) for i in y_pred_mono]
    out_label = ['Pnemonia Detected', 'No Pnemonia', 'No Pnemonia/Enlarged Lungs']
    print("Classification/Prediction: ", out_label[y_pred_final[0]])
    if y_pred_final[0] == 0:
        print("Detection/Prediction: ")
        pred_sample = np.squeeze(model_unet.predict(x=np.array([mono_rgb])))
        # Build a uint8 grayscale image from the predicted mask (0..1 -> 0..255).
        gray = np.expand_dims(pred_sample*255, axis=-1)
        gray = np.repeat(gray[..., np.newaxis], 3, -1)
        gray = np.squeeze(gray)
        gray = np.array(gray, np.uint8)
        gray = cv2.cvtColor(gray, cv2.COLOR_BGR2GRAY)
        # apply thresholding on the gray image to create a binary image
        ret, thresh = cv2.threshold(gray, 127, 255, 0)
        # find the contours
        contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        if len(contours) > 0:
            # take the first contour and compute its bounding rectangle
            cnt = contours[0]
            x, y, w, h = cv2.boundingRect(cnt)
            pred_sample_bb = pred_sample*255
            pred_sample_bb = np.repeat(pred_sample_bb[..., np.newaxis], 3, -1)
            pred_sample_bb = np.squeeze(pred_sample_bb)
            pred_sample_bb_copy = pred_sample_bb.copy()  # copy of the base image
            # BUG FIX: the original drew on (and showed) `pred_mask_bb_copy`,
            # which is never defined in this function -> NameError at runtime.
            # Draw on the copy created above instead.
            pred_sample_bb_copy = cv2.drawContours(pred_sample_bb_copy, [cnt], 0, (0,255,255), 2)
            cv2_imshow(pred_sample_bb_copy)
            # draw the bounding rectangle on the (resized) input image
            org_img_copy = mono.copy()
            cv2.rectangle(org_img_copy, (x, y), (x+w, y+h), (0, 255, 0), 1, cv2.LINE_AA)
            print("Predicted Pnemonia image with Bounding Box")
            cv2_imshow(org_img_copy)
        else:
            print("No Contour, No Bounding Box")
            cv2_imshow(pred_sample*255)
    else:
        print("No Detection/Prediction")
# ---- Tk file-explorer UI ---------------------------------------------------
# Minimal window: a status label, a Browse button, the classify/detect
# button, and an Exit button, laid out with the grid geometry manager.

# Create the root window
window = Tk()
# Set window title
window.title('Image Explorer')
# Set window size
window.geometry("800x800")
# Set window background color
window.config(background = "white")

# Create a File Explorer label (browseFiles writes the chosen path here)
label_file_explorer = Label(window,
                            text = "Medical Image - Explorer",
                            width = 100, height = 4,
                            fg = "blue")
button_explore = Button(window,
                        text = "Browse Files",
                        command = browseFiles)
button_classifydetect = Button(window,
                               text = "Classify & Detect Pnemonia",
                               command = ClassifyDetectFile)
button_exit = Button(window,
                     text = "Exit",
                     command = exit)

# Place the widgets in a single column.
label_file_explorer.grid(column = 1, row = 1)
button_explore.grid(column = 1, row = 2)
# BUG FIX: the classify/detect button was created but never placed with
# .grid(), so it was invisible and unusable. Place it and move Exit down
# one row.
button_classifydetect.grid(column = 1, row = 3)
button_exit.grid(column = 1, row = 4)

# Let the window wait for any events
window.mainloop()
# This cell does not work on Colab: creating the Tk window raises the
# TclError shown below because no display / $DISPLAY is available there.
# It does run in a local Jupyter notebook, but the integration with the
# model / browsed-file path is still incomplete.
--------------------------------------------------------------------------- TclError Traceback (most recent call last) <ipython-input-99-fa7b6ca7792c> in <cell line: 5>() 3 import matplotlib 4 # matplotlib.use('gtk') # You can use another X11 back-end of your choice ----> 5 window = tk.Tk() 6 7 #NOT WORKING /usr/lib/python3.9/tkinter/__init__.py in __init__(self, screenName, baseName, className, useTk, sync, use) 2268 baseName = baseName + ext 2269 interactive = False -> 2270 self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use) 2271 if useTk: 2272 self._loadtk() TclError: no display name and no $DISPLAY environment variable